Compare commits

...

15 Commits

22 changed files with 541 additions and 280 deletions

View File

@ -1,16 +1,18 @@
TZ=UTC TZ=UTC
PORT=3333 PORT=3333
HOST=localhost HOST=0.0.0.0
LOG_LEVEL=info LOG_LEVEL=info
APP_KEY= APP_KEY=sMoYEqixvC3sgJO4WM9ej9ctlcVtAdCE
NODE_ENV=development NODE_ENV=development
SESSION_DRIVER=cookie SESSION_DRIVER=cookie
PG_USER=postgres
PG_PORT=5432 PG_PORT=5432
PG_HOST=localhost PG_HOST=db
PG_PASSWORD=password PG_PASSWORD=password
SENTRY_TOKEN= SENTRY_TOKEN=
SENTRY_ORG= SENTRY_ORG=
REDIS_HOST=sentry-redis-1 REDIS_HOST=sentry-redis-1
REDIS_PORT=6379 REDIS_PORT=6379
REDIS_PASSWORD= REDIS_PASSWORD=
WEBHOOK_URL= WEBHOOK_URL=
QUERY_FILTER='!user.email:*@mailinator.com !user.email:*@example.com'

1
.gitignore vendored
View File

@ -23,3 +23,4 @@ yarn-error.log
# Platform specific # Platform specific
.DS_Store .DS_Store
*compose-prod.yml

View File

@ -1,4 +1,6 @@
FROM node:20.12.2-alpine3.18 AS base FROM node:lts-alpine3.22 AS base
HEALTHCHECK --interval=5s --timeout=10s --start-period=5s --retries=5 \
CMD sh -c 'wget --no-verbose --tries=1 --spider http://127.0.0.1:3333 || exit 1'
# All deps stage # All deps stage
FROM base AS deps FROM base AS deps
@ -6,11 +8,10 @@ WORKDIR /app
ADD package.json package-lock.json ./ ADD package.json package-lock.json ./
RUN npm ci RUN npm ci
FROM node:22 AS dev-deps FROM deps AS develop
WORKDIR /app WORKDIR /app
ADD package.json package-lock.json ./ COPY --from=deps /app/node_modules /app/node_modules
ENV NODE_ENV=development ENV NODE_ENV=development
RUN npm ci
EXPOSE 3333 EXPOSE 3333
# Production only deps stage # Production only deps stage
@ -33,4 +34,5 @@ WORKDIR /app
COPY --from=production-deps /app/node_modules /app/node_modules COPY --from=production-deps /app/node_modules /app/node_modules
COPY --from=build /app/build /app COPY --from=build /app/build /app
EXPOSE 8080 EXPOSE 8080
CMD ["node", "./bin/server.js"] CMD ["node", "./bin/server.js"]

32
README.md Normal file
View File

@ -0,0 +1,32 @@
# Sentry Toolkit
This project was born out of a simple marketing request, basically along the lines of "how can we track user engagement in our CRM?", to which I answered "We already use Sentry for session recording; we can pull that data from the API, aggregate it and push it to the CRM." Hence this project. It is currently pretty simple and includes an API as well as a basic web UI.
## Tech Stack
- [AdonisJS](https://adonisjs.com): I decided to use the wonderful AdonisJS framework for this project. Overkill? Probably, but it has a lot of niceties built in and I didn't want to reinvent the wheel for this simple project. I also wanted to play around with InertiaJS, which comes included.
- [Docker](https://docker.com) - All services have been containerized for convenience of developing, testing and deploying. A `compose.yml` and `compose.override.yml` are included for testing and developing locally.
- Redis - Some basic caching because why not?
- PostgreSQL - Useful for storing historical session data.
- Traefik - Reverse proxy/ingress controller provided for convenient development and local testing.
- Grafana - (Optional) For building pretty dashboards.
## Getting started
```shell
$ cp .env.example .env.develop
# Add/edit values in .env.develop as needed
# The WEBHOOK_URL is not strictly necessary for basic functionality.
# Tested on Linux, I have not had the pleasure of setting up Traefik on Windows/Mac
# recently so suggestions welcome. Also you may need `sudo` depending on how your
# Docker environment is set up.
$ docker compose up -d
```
Once all of the containers come up, you should be able to access the UI/API on [http://sentry.docker.localhost](http://sentry.docker.localhost) (Docker compose magic.) The database migrations should automatically run when you start with `docker compose`, but if you are running the backend with node instead you will need to run `node ace migration:run` after starting the app for the first time.
The main page will list any Replay sessions stored in the database.
![](./docs/assets/homepage.jpg)
[http://sentry.docker.localhost/replays](http://sentry.docker.localhost/replays) will fetch session data from Sentry and store it in the database. It will also return the results as JSON.

View File

@ -53,7 +53,7 @@ export default defineConfig({
() => import('@adonisjs/lucid/database_provider'), () => import('@adonisjs/lucid/database_provider'),
() => import('@adonisjs/auth/auth_provider'), () => import('@adonisjs/auth/auth_provider'),
() => import('@adonisjs/inertia/inertia_provider'), () => import('@adonisjs/inertia/inertia_provider'),
() => import('@adonisjs/redis/redis_provider') () => import('@adonisjs/redis/redis_provider'),
], ],
/* /*

52
app/Helpers/Replays.ts Normal file
View File

@ -0,0 +1,52 @@
import Replay from '#models/replay'
import { parseSentryLinkHeader, SentryPagination } from './Sentry.js'
import env from '#start/env'
const SENTRY_TOKEN = env.get('SENTRY_TOKEN')

interface ApiResponse<T> {
  data: T
  // optionally, you can define `meta`, `errors`, etc. if your API returns them
}

/**
 * Fetch every page of replays from the Sentry API starting at `url`,
 * persist each batch via `Replay.updateOrCreateMany`, and report how
 * many records were written.
 *
 * Pagination follows the `link` response header iteratively. The
 * previous implementation recursed and accumulated into a module-level
 * counter that was never reset, so totals leaked across requests; the
 * counter is now local to each call.
 *
 * @param url - First Sentry replays API page to fetch.
 * @returns `{ recordsUpdated }` on success, or `{ error }` when a
 *          Sentry response is missing its pagination `link` header
 *          (records written up to that point are kept).
 * @throws Error when any page responds with a non-2xx status.
 */
export async function fetchBatch(url: string) {
  const options: RequestInit = {
    headers: {
      Authorization: `Bearer ${SENTRY_TOKEN}`,
    },
  }

  let recordsUpdated = 0
  let nextUrl: string | null = url

  while (nextUrl) {
    const req = await fetch(nextUrl, options)
    if (!req.ok) {
      throw new Error(`Request failed with status ${req.status}`)
    }

    const resp = (await req.json()) as ApiResponse<Replay[]>
    // Strip any fields the model does not allow before upserting.
    const cleanedData = resp.data.map((record) => sanitizeInput(record, Replay.allowedFields))
    const updated = await Replay.updateOrCreateMany('id', cleanedData)
    recordsUpdated += updated.length

    const linkHeader = req.headers.get('link')
    if (!linkHeader) {
      return { error: 'link header missing from Sentry API response' }
    }

    const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
    if (pagination.hasNextResults) {
      console.log('fetching', pagination.next)
      nextUrl = pagination.next
    } else {
      nextUrl = null
    }
  }

  console.log('no more results')
  return { recordsUpdated }
}
/**
 * Return a copy of `data` containing only the keys listed in
 * `allowedFields`; every other property is dropped.
 */
function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
  const cleaned: Record<string, any> = {}
  for (const field of allowedFields) {
    // `in` (rather than a truthiness check) keeps keys whose value is
    // falsy or explicitly undefined, matching the original behavior.
    if (field in data) cleaned[field] = data[field]
  }
  return cleaned
}

27
app/Helpers/Sentry.ts Normal file
View File

@ -0,0 +1,27 @@
export interface SentryPagination {
  previous: string
  hasPreviousResults: boolean
  hasNextResults: boolean
  next: string
}

/**
 * Parse Sentry's pagination `link` response header into a
 * {@link SentryPagination}.
 *
 * The header contains comma-separated entries of the form
 * `<url>; rel="previous|next"; results="true|false"; cursor="..."`.
 * Entries that do not match this shape are ignored.
 *
 * All fields are default-initialized (previously the result was built
 * with a bare `{} as SentryPagination` assertion, so a header with no
 * matching entries yielded `undefined` fields).
 */
export function parseSentryLinkHeader(header: string): SentryPagination {
  const result: SentryPagination = {
    previous: '',
    hasPreviousResults: false,
    next: '',
    hasNextResults: false,
  }

  for (const link of header.split(',')) {
    const match = link
      .trim()
      .match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
    if (!match) continue

    const [, url, rel, results] = match
    if (rel === 'previous') {
      result.previous = url
      result.hasPreviousResults = results === 'true'
    } else if (rel === 'next') {
      result.next = url
      result.hasNextResults = results === 'true'
    }
  }

  return result
}

21
app/Helpers/Webhook.ts Normal file
View File

@ -0,0 +1,21 @@
import env from '#start/env'
/**
 * POST the aggregated replay stats payload to the configured webhook.
 *
 * WEBHOOK_URL is declared optional in the env schema, so when it is not
 * set the call is skipped instead of attempting a fetch against
 * `undefined`. Network errors are logged and swallowed on purpose —
 * webhook delivery is best-effort and must not fail the caller.
 *
 * @param responseData - Versioned stats snapshot to deliver.
 */
export async function sendDataToWebhook(responseData: {
  version: number
  updatedAt: Date
  numberOfRecords: number
  data: unknown
}) {
  const webhookUrl = env.get('WEBHOOK_URL')
  if (!webhookUrl) {
    console.log('WEBHOOK_URL not configured, skipping webhook sync')
    return
  }
  try {
    console.log('syncing to webhook')
    const res = await fetch(webhookUrl, {
      headers: {
        'content-type': 'application/json',
      },
      method: 'POST',
      body: JSON.stringify(responseData),
    })
    // Surface rejected deliveries; previously a non-2xx response was
    // silently ignored.
    if (!res.ok) {
      console.error('webhook responded with status', res.status)
    }
  } catch (e) {
    console.error('error sending webhook data', e)
  }
}

View File

@ -1,95 +1,51 @@
import Replay from '#models/replay' import Replay from '#models/replay'
import env from '#start/env' import env from '#start/env'
import type { HttpContext } from '@adonisjs/core/http' import type { HttpContext } from '@adonisjs/core/http'
import db from '@adonisjs/lucid/services/db'
const SENTRY_TOKEN = env.get('SENTRY_TOKEN')
const SENTRY_ORG = env.get('SENTRY_ORG') const SENTRY_ORG = env.get('SENTRY_ORG')
let recordsUpdated = 0
import redis from '@adonisjs/redis/services/main' import redis from '@adonisjs/redis/services/main'
import { fetchBatch } from '../Helpers/Replays.js'
import { sendDataToWebhook } from '../Helpers/Webhook.js'
import { faker } from '@faker-js/faker'
interface ApiResponse<T> {
data: T;
// optionally, you can define `meta`, `errors`, etc. if your API returns them
}
interface SentryPagination {
previous: string;
hasPreviousResults: boolean;
hasNextResults: boolean;
next: string
}
export default class ReplaysController { export default class ReplaysController {
public async faker({ request, response }: HttpContext) {
public async search({ response }: HttpContext) { const { page } = await request.qs()
let results = await db.rawQuery(` const sessions = Array.from({ length: 100 }, generateFakeSession)
SELECT const nextPage = +page + 1
u.display_name, await response.safeHeader(
u.sessions, 'link',
u.total_time_seconds, `<http://localhost:3333/faker/?page=${page}>; rel="previous"; results="true"; cursor="0:1100:1", <http://localhost:3333/faker/?page=${nextPage}>; rel="next"; results="${page == 10 ? 'false' : 'true'}"; cursor="0:${page * 100}:0"`
u.total_time_readable,
u.average_session_time_readable,
u.average_time_seconds,
r.id AS last_session_id,
r.finished_at AS last_session_time
FROM (
-- Aggregate sessions in the last 30 days
SELECT
"user" ->> 'display_name' AS display_name,
COUNT(duration) AS sessions,
SUM(duration) AS total_time_seconds,
AVG(duration) AS average_time_seconds,
CONCAT(
FLOOR(SUM(duration) / 86400), 'd ',
FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
) AS total_time_readable,
CONCAT(
FLOOR(COUNT(duration) / 86400), 'd ',
FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
) AS average_session_time_readable
FROM
replays
WHERE
finished_at >= NOW() - INTERVAL '30 days'
GROUP BY
"user" ->> 'display_name'
) u
-- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
ORDER BY
CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
finished_at DESC
LIMIT 1
) r ON true
ORDER BY
u.total_time_seconds DESC;`
) )
try { return { data: sessions, count: sessions.length, page: page }
}
await fetch(env.get('WEBHOOK_URL'), public async stats({ request, response }: HttpContext) {
{ const { sendToWebhook } = request.qs()
headers: const latestVersion = await redis.get(`replays:stats:latest_version`)
{ let results
'content-type': 'application/json' if (!latestVersion) {
}, console.log('Cache miss')
method: 'POST', results = await Replay.updateReplayStats()
body: JSON.stringify(results.rows) } else {
console.log('cache hit')
let data = await redis.get(`replays:stats:version:${latestVersion}:results`)
if (data) {
results = JSON.parse(data)
} }
) }
} catch(e) {
console.error('error sending webhook data', e) let responseData = {
version: results.version,
updatedAt: results.updatedAt,
numberOfRecords: results.rows.length,
data: results.rows,
}
if (sendToWebhook) {
await sendDataToWebhook(responseData)
}
return response.json(responseData)
} }
response.json(results.rows)
} public async home({ request, inertia }: HttpContext) {
public async list({ request, inertia }: HttpContext) {
const page = request.input('page', 1) const page = request.input('page', 1)
const perPage = 20 const perPage = 20
const cacheKey = `replays:page:${page}` const cacheKey = `replays:page:${page}`
@ -98,16 +54,16 @@ ORDER BY
let paginated, meta, replays let paginated, meta, replays
if (data) { if (data) {
({ paginated, meta, replays } = JSON.parse(data)) ;({ paginated, meta, replays } = JSON.parse(data))
} else { } else {
paginated = await Replay.query().paginate(page, perPage) paginated = await Replay.query().paginate(page, perPage)
paginated.baseUrl('/list') paginated.baseUrl('/')
const json = paginated.toJSON() const json = paginated.toJSON()
meta = { meta = {
...json.meta, ...json.meta,
links: buildPaginationLinks(json.meta) links: buildPaginationLinks(json.meta),
} }
replays = json.data replays = json.data
@ -118,107 +74,53 @@ ORDER BY
return inertia.render('Replays/Index', { return inertia.render('Replays/Index', {
data: { data: {
replays, replays,
meta meta,
} },
}) })
} }
async index({ request, response }: HttpContext) { async index({ request, response }: HttpContext) {
const { statsPeriod, start, end } = request.qs() const { statsPeriod, start, end } = request.qs()
recordsUpdated = 0 let queryString: string = '?statsPeriod=24h' // Default in case none is provided
let queryString: string = '?statsPeriod=24h'// Default in case none is provided
if (statsPeriod) { if (statsPeriod) {
queryString = `?statsPeriod=${statsPeriod}` queryString = `?statsPeriod=${statsPeriod}`
} else if (start && end) { } else if (start && end) {
queryString = `?start=${start}&end=${end}` queryString = `?start=${start}&end=${end}`
} }
const replays = await fetchBatch(`https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}`) const queryFilter = env.get('QUERY_FILTER')
return response.json(replays) const baseUrl =
} env.get('NODE_ENV') == 'production'
? `https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}&field=id&field=user&field=duration&field=started_at&field=finished_at&query=${encodeURIComponent(queryFilter)}`
: 'http://localhost:3333/faker?page=1'
console.log('base', baseUrl)
await fetchBatch(baseUrl)
let queryResults = await Replay.updateReplayStats()
return response.json({ version: queryResults.latestVersion, ...queryResults })
}
} }
function buildPaginationLinks(meta: {
async function fetchBatch(url: string) { previousPageUrl: string
const options: RequestInit = { lastPage: number
headers: { currentPage: number
Authorization: `Bearer ${SENTRY_TOKEN}` nextPageUrl: string
} }) {
}
const req = await fetch(url, options)
if (!req.ok) {
throw new Error(`Request failed with status ${req.status}`);
}
const resp = await req.json() as ApiResponse<Replay[]>;
const replays = resp.data;
const headers = req.headers
const cleanedData = replays.map(record => sanitizeInput(record, Replay.allowedFields))
let updated = await Replay.updateOrCreateMany('id', cleanedData)
recordsUpdated = recordsUpdated + updated.length
const linkHeader = headers.get('link')
if (!linkHeader) {
return { error: 'link header missing from Sentry API response' }
}
const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
if (pagination.hasNextResults == true) {
console.log('fetching', pagination.next)
await fetchBatch(pagination.next)
}
console.log('no more results')
return { recordsUpdated }
}
function parseSentryLinkHeader(header: string): SentryPagination {
const links = header.split(',').map(part => part.trim())
let result = {} as SentryPagination
for (const link of links) {
const match = link.match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
if (!match) continue
const [, url, rel, results] = match
if (rel === 'previous') {
result.previous = url
result.hasPreviousResults = results === 'true'
} else if (rel === 'next') {
result.next = url
result.hasNextResults = results === 'true'
}
}
return result
}
function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
return allowedFields.reduce((acc, key) => {
if (key in data) acc[key] = data[key]
return acc
}, {} as Record<string, any>)
}
function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number; currentPage: number; nextPageUrl: string }) {
const links = [] const links = []
// Previous // Previous
links.push({ links.push({
url: meta.previousPageUrl, url: meta.previousPageUrl,
label: '&laquo; Prev', label: '&laquo; Prev',
active: false active: false,
}) })
for (let page = 1; page <= meta.lastPage; page++) { for (let page = 1; page <= meta.lastPage; page++) {
links.push({ links.push({
url: `/list?page=${page}`, url: `/?page=${page}`,
label: page.toString(), label: page.toString(),
active: page === meta.currentPage active: page === meta.currentPage,
}) })
} }
@ -226,8 +128,68 @@ function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number;
links.push({ links.push({
url: meta.nextPageUrl, url: meta.nextPageUrl,
label: 'Next &raquo;', label: 'Next &raquo;',
active: false active: false,
}) })
return links return links
} }
// Build one fake Sentry replay session for local development.
// The object shape mirrors the fields of Sentry's replays API response so
// the /faker endpoint can stand in for the real service when NODE_ENV is
// not production (see `index` above).
// NOTE(review): field semantics are assumed to match Sentry's replay
// schema — confirm against the live API before relying on new fields.
function generateFakeSession() {
  const uuid = faker.string.uuid()
  const browserName = faker.helpers.arrayElement(['Chrome', 'Firefox', 'Safari', 'Edge', 'Brave'])
  const deviceBrand = faker.helpers.arrayElement(['Apple', 'Samsung', 'Google'])
  const osName = faker.helpers.arrayElement(['iOS', 'Android', 'Windows', 'macOS'])
  const platform = faker.helpers.arrayElement(['Sentry', 'Datadog', 'New Relic', 'Rollbar'])
  // Random instant within the last hour; used below as the lower bound
  // for finished_at.
  const finishedAt = new Date(Date.now() - faker.number.int({ min: 0, max: 60 * 60 * 1000 }))
  const displayName = faker.internet.email()
  return {
    activity: faker.number.int({ min: 1, max: 10 }),
    browser: {
      name: browserName,
      version: faker.system.semver(),
    },
    count_dead_clicks: faker.number.int({ min: 0, max: 10 }),
    count_rage_clicks: faker.number.int({ min: 0, max: 5 }),
    count_errors: faker.number.int({ min: 0, max: 5 }),
    count_segments: faker.number.int({ min: 0, max: 3 }),
    count_urls: faker.number.int({ min: 1, max: 3 }),
    device: {
      brand: deviceBrand,
      family: deviceBrand === 'Apple' ? 'iPhone' : deviceBrand,
      model: faker.string.numeric({ length: 2 }),
      name: `${deviceBrand} ${faker.string.alphanumeric({ length: 3 })}`,
    },
    dist: null,
    duration: faker.number.int({ min: 100, max: 1000 }),
    environment: faker.helpers.arrayElement(['production', 'staging', 'development']),
    // The same uuid doubles as the session id and its error/trace ids.
    error_ids: [uuid],
    finished_at: faker.date.between({ from: finishedAt, to: new Date() }).toISOString(),
    has_viewed: faker.datatype.boolean(),
    id: uuid,
    // Randomly null or false — never true, so fake sessions are always
    // treated as un-archived. TODO confirm this matches the real API.
    is_archived: faker.datatype.boolean() ? null : false,
    os: {
      name: osName,
      version: `${faker.number.int({ min: 10, max: 17 })}.${faker.number.int({ min: 0, max: 5 })}`,
    },
    platform: platform,
    project_id: faker.string.numeric({ length: 6 }),
    releases: [`version@${faker.system.semver()}`],
    sdk: {
      name: faker.hacker.noun(),
      version: faker.system.semver(),
    },
    started_at: faker.date.recent().toISOString(),
    tags: {
      hello: ['world', faker.person.fullName()],
    },
    trace_ids: [uuid],
    urls: [faker.internet.url()],
    user: {
      // display_name and email share the same fake address.
      display_name: displayName,
      email: displayName,
      id: faker.string.numeric({ length: 8 }),
      ip: faker.internet.ip(),
      username: faker.internet.username(),
    },
  }
}

View File

@ -1,7 +1,85 @@
import { DateTime } from 'luxon' import { DateTime } from 'luxon'
import { BaseModel, column } from '@adonisjs/lucid/orm' import { BaseModel, column } from '@adonisjs/lucid/orm'
import db from '@adonisjs/lucid/services/db'
import redis from '@adonisjs/redis/services/main'
export default class Replay extends BaseModel { export default class Replay extends BaseModel {
public static async updateReplayStats() {
let results = await db.rawQuery(`
SELECT
u.display_name,
u.sessions,
u.total_time_seconds,
u.total_time_readable,
u.average_session_time_readable,
u.average_time_seconds,
r.id AS last_session_id,
r.finished_at AS last_session_time,
o.id AS oldest_session_id,
o.finished_at AS oldest_session_time
FROM (
-- Aggregate sessions in the last 30 days
SELECT
"user" ->> 'display_name' AS display_name,
COUNT(duration) AS sessions,
SUM(duration) AS total_time_seconds,
AVG(duration) AS average_time_seconds,
CONCAT(
FLOOR(SUM(duration) / 86400), 'd ',
FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
) AS total_time_readable,
CONCAT(
FLOOR(COUNT(duration) / 86400), 'd ',
FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
) AS average_session_time_readable
FROM
replays
WHERE
finished_at >= NOW() - INTERVAL '30 days'
AND "user" ->> 'display_name' LIKE '%@%'
AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
GROUP BY
"user" ->> 'display_name'
) u
-- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
AND "user" ->> 'display_name' LIKE '%@%'
AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
ORDER BY
CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
finished_at DESC
LIMIT 1
) r ON true
-- LATERAL JOIN to get the oldest session
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
AND "user" ->> 'display_name' LIKE '%@%'
AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
ORDER BY finished_at ASC
LIMIT 1
) o ON true
ORDER BY
u.total_time_seconds DESC;
`)
const updatedVersion = await redis.incr('replays:stats:latest_version')
results.version = updatedVersion
results.updatedAt = Date.now()
await redis.set(`replays:stats:version:${updatedVersion}:results`, JSON.stringify(results))
return results
}
@column({ isPrimary: true }) @column({ isPrimary: true })
declare id: string declare id: string
@ -12,14 +90,14 @@ export default class Replay extends BaseModel {
prepare: (value) => { prepare: (value) => {
// The values from sentry are just arrays so convert them to json // The values from sentry are just arrays so convert them to json
return JSON.stringify(value) return JSON.stringify(value)
} },
}) })
declare trace_ids: string[] declare trace_ids: string[]
@column({ @column({
prepare: (value) => { prepare: (value) => {
return JSON.stringify(value) return JSON.stringify(value)
} },
}) })
declare error_ids: string[] declare error_ids: string[]
@ -30,50 +108,44 @@ export default class Replay extends BaseModel {
prepare: (value) => { prepare: (value) => {
// The values from sentry are just arrays so convert them to json // The values from sentry are just arrays so convert them to json
return JSON.stringify(value) return JSON.stringify(value)
} },
}) })
declare tags: string[] declare tags: string[]
@column() @column()
declare user: string[] declare user: string[]
@column() @column()
declare sdk: any declare sdk: any
@column() @column()
declare os: any declare os: any
@column() @column()
declare browser: any declare browser: any
@column() @column()
declare device: any declare device: any
@column() @column()
declare ota_updates: any declare ota_updates: any
@column() @column()
declare is_archived: boolean | null declare is_archived: boolean | null
@column({ @column({
prepare: (value) => { prepare: (value) => {
// The values from sentry are just arrays so convert them to json // The values from sentry are just arrays so convert them to json
return JSON.stringify(value) return JSON.stringify(value)
} },
}) })
declare urls: any declare urls: any
@column({ @column({
prepare: (value) => { prepare: (value) => {
// The values from sentry are just arrays so convert them to json // The values from sentry are just arrays so convert them to json
return JSON.stringify(value) return JSON.stringify(value)
} },
}) })
declare clicks: any declare clicks: any
@ -92,7 +164,7 @@ export default class Replay extends BaseModel {
@column.dateTime() @column.dateTime()
declare finished_at: DateTime | null declare finished_at: DateTime | null
@column.dateTime({serializeAs: 'started_at'}) @column.dateTime({ serializeAs: 'started_at' })
declare started_at: DateTime | null declare started_at: DateTime | null
@column() @column()
@ -110,12 +182,11 @@ export default class Replay extends BaseModel {
@column() @column()
declare platform: string | null declare platform: string | null
@column({ @column({
prepare: (value) => { prepare: (value) => {
// The values from sentry are just arrays so convert them to json // The values from sentry are just arrays so convert them to json
return JSON.stringify(value) return JSON.stringify(value)
} },
}) })
declare releases: any declare releases: any

View File

@ -1,16 +1,14 @@
--- ---
name: sentry
services: services:
scraper: scraper:
build: build:
context: . context: .
target: dev-deps target: develop
env_file: .env.develop env_file: .env.develop
volumes: volumes:
- ./:/app - ./:/app
- node_modules:/app/node_modules - node_modules:/app/node_modules
command: /bin/bash -c "echo 'hello' && node ace migration:run --force && node ace serve --watch" command: /bin/sh -c "node ace migration:run --force && node ace serve --watch"
depends_on: depends_on:
db: db:
condition: service_healthy condition: service_healthy

77
compose.yml Normal file
View File

@ -0,0 +1,77 @@
---
name: sentry
services:
  # Traefik ingress: routes *.docker.localhost hostnames to containers.
  reverse-proxy:
    image: traefik:latest
    command: --api.insecure=true --providers.docker
    ports:
      - 80:80
      - 8080:8080
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - traefik
  # AdonisJS app; build/command come from compose.override.yml for dev.
  scraper:
    labels:
      - 'traefik.enable=true'
      - 'traefik.docker.network=sentry_traefik'
      - 'traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)'
      - 'traefik.http.services.scraper.loadbalancer.server.port=3333'
      - 'traefik.http.routers.scraper.entrypoints=http'
      - 'traefik.http.routers.scraper.service=scraper'
    networks:
      - traefik
      - redis
      - database
    depends_on:
      db:
        condition: service_healthy
  db:
    image: postgres:16
    environment:
      # NOTE(review): hard-coded credential — acceptable for local dev only.
      - POSTGRES_PASSWORD=password
    healthcheck:
      # CMD-SHELL takes a single shell string; Docker ignores extra array
      # elements after it, so the previous
      # ['CMD-SHELL', 'pg_isready', '-d', 'postgres'] silently dropped the
      # '-d postgres' arguments. Fold them into one string.
      test: ['CMD-SHELL', 'pg_isready -d postgres']
      interval: 5s
      timeout: 60s
      retries: 5
      start_period: 5s
    networks:
      - database
    volumes:
      - pg_data:/var/lib/postgresql/data
  # Optional dashboarding, reachable at grafana.docker.localhost.
  grafana:
    image: grafana/grafana:latest
    labels:
      - 'traefik.enable=true'
      - 'traefik.docker.network=sentry_traefik'
      - 'traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)'
      - 'traefik.http.routers.grafana.entrypoints=http'
      - 'traefik.http.services.grafana.loadbalancer.server.port=3000'
      - 'traefik.http.routers.grafana.service=grafana'
    networks:
      - traefik
      - database
    healthcheck:
      test:
        [
          'CMD-SHELL',
          'wget --no-verbose --tries=1 --spider http://localhost:3000/api/health || exit 1',
        ]
      interval: 10s
      timeout: 30s
      retries: 5
      start_period: 30s
  redis:
    image: redis:latest
    networks:
      - redis
networks:
  traefik:
    driver: bridge
  database:
    driver: bridge
  redis:
    driver: bridge
volumes:
  pg_data: {}

View File

@ -33,4 +33,4 @@ export default redisConfig
declare module '@adonisjs/redis/types' { declare module '@adonisjs/redis/types' {
export interface RedisConnections extends InferConnections<typeof redisConfig> {} export interface RedisConnections extends InferConnections<typeof redisConfig> {}
} }

View File

@ -1,65 +0,0 @@
---
name: sentry
services:
reverse-proxy:
image: traefik:latest
command: --api.insecure=true --providers.docker
ports:
- 80:80
- 8080:8080
volumes:
- /var/run/docker.sock:/var/run/docker.sock
networks:
- traefik
scraper:
labels:
- "traefik.enable=true"
- "traefik.docker.network=sentry_traefik"
- "traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)"
- "traefik.http.services.scraper.loadbalancer.server.port=3333"
- "traefik.http.routers.scraper.entrypoints=http"
- "traefik.http.routers.scraper.service=scraper"
networks:
- traefik
- redis
- database
db:
image: postgres:16
environment:
- POSTGRES_PASSWORD=password
healthcheck:
test: ["CMD-SHELL", "pg_isready", "-d", "postgres"]
interval: 5s
timeout: 60s
retries: 5
start_period: 5s
networks:
- database
volumes:
- pg_data:/var/lib/postgresql/data
grafana:
image: grafana/grafana:latest
labels:
- "traefik.enable=true"
- "traefik.docker.network=sentry_traefik"
- "traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)"
- "traefik.http.routers.grafana.entrypoints=http"
- "traefik.http.services.grafana.loadbalancer.server.port=3000"
- "traefik.http.routers.grafana.service=grafana"
networks:
- traefik
- database
redis:
image: redis:latest
networks:
- redis
networks:
traefik:
driver: bridge
database:
driver: bridge
redis:
driver: bridge
volumes:
pg_data: {}

BIN
docs/assets/homepage.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

View File

@ -4,10 +4,10 @@
import '../css/app.css' import '../css/app.css'
import { createSSRApp, h } from 'vue' import { createSSRApp, h } from 'vue'
import type { DefineComponent } from 'vue' import type { DefineComponent } from 'vue'
import { createInertiaApp } from '@inertiajs/vue3' import { createInertiaApp, Link } from '@inertiajs/vue3'
import { resolvePageComponent } from '@adonisjs/inertia/helpers' import { resolvePageComponent } from '@adonisjs/inertia/helpers'
const appName = import.meta.env.VITE_APP_NAME || 'AdonisJS' const appName = import.meta.env.VITE_APP_NAME || 'AdonisJS'
Vue.component('inertia-link', Link)
createInertiaApp({ createInertiaApp({
progress: { color: '#5468FF' }, progress: { color: '#5468FF' },

View File

@ -1,5 +1,5 @@
<template> <template>
<div> <div class="m-5">
<h1 class="text-2xl font-bold mb-4">Replays</h1> <h1 class="text-2xl font-bold mb-4">Replays</h1>
<table class="w-full border text-left"> <table class="w-full border text-left">
@ -7,37 +7,115 @@
<tr class="bg-gray-100"> <tr class="bg-gray-100">
<th class="p-2">ID</th> <th class="p-2">ID</th>
<th class="p-2">Email</th> <th class="p-2">Email</th>
<th class="p-2">Started</th> <th class="p-2">Date</th>
<th class="p-2">Location</th>
</tr> </tr>
</thead> </thead>
<tbody> <tbody>
<tr v-for="replay in data.replays" :key="replay.id" class="border-t"> <tr v-for="replay in data.replays" :key="replay.id" class="border-t">
<td class="p-2">{{ replay.id }}</td> <td class="p-2">{{ replay.id }}</td>
<td class="p-2">{{ replay.user.email ?? replay.user.display_name }}</td> <td class="p-2">{{ replay.user.email ?? replay.user.display_name }}</td>
<td class="p-2">{{ replay.started_at }}</td> <td class="p-2">{{ replay.finished_at }}</td>
<td class="p-2">
{{
replay.user.geo
? `${replay.user.geo.city} ${replay.user.geo.subdivision}, ${replay.user.geo.region}`
: 'unknown'
}}
</td>
</tr> </tr>
</tbody> </tbody>
</table> </table>
<!-- Pagination --> <!-- Pagination -->
<div class="mt-4 flex space-x-2"> <div
<template v-for="link in data.meta.links" :key="link.label"> class="mt-4 flex flex-wrap items-center gap-2"
<Link v-if="data.meta && data.meta.links && data.meta.links.length > 1"
>
<!-- First -->
<Link
v-if="firstPageUrl && !isFirstPage"
:href="firstPageUrl"
class="px-3 py-1 border rounded text-sm"
>
« First
</Link>
<!-- Previous -->
<Link v-if="prevPageUrl" :href="prevPageUrl" class="px-3 py-1 border rounded text-sm">
Prev
</Link>
<!-- Page Numbers (windowed) -->
<template v-for="link in paginatedLinks" :key="link.label">
<component
:is="link.url ? Link : 'span'"
:href="link.url" :href="link.url"
class="px-2 py-1 border rounded" class="px-3 py-1 border rounded text-sm"
:class="{ 'font-bold bg-gray-200': link.active, 'text-gray-400': !link.url }" :class="{
/> 'font-bold bg-gray-300': link.active,
'text-gray-400 cursor-not-allowed': !link.url,
}"
>
<span v-html="link.label" />
</component>
</template> </template>
<!-- Next -->
<Link v-if="nextPageUrl" :href="nextPageUrl" class="px-3 py-1 border rounded text-sm">
Next
</Link>
<!-- Last -->
<Link
v-if="lastPageUrl && !isLastPage"
:href="lastPageUrl"
class="px-3 py-1 border rounded text-sm"
>
Last »
</Link>
</div> </div>
</div> </div>
</template> </template>
<script setup> <script setup>
import { Link, usePage } from '@inertiajs/vue3' import { computed } from 'vue'
import { Link } from '@inertiajs/vue3'
const props = defineProps({ const props = defineProps({
data: Object data: Object,
}) })
// Core pagination values
const links = computed(() => props.data.meta.links || [])
const currentIndex = computed(() => links.value.findIndex((link) => link.active))
const maxVisible = 10
const half = Math.floor(maxVisible / 2)
const paginatedLinks = computed(() => {
const total = links.value.length
if (total <= maxVisible) return links.value
let start = Math.max(currentIndex.value - half, 0)
let end = start + maxVisible
if (end > total) {
end = total
start = Math.max(0, end - maxVisible)
}
return links.value.slice(start, end)
})
// Navigation links
const firstPageUrl = computed(() => links.value[1]?.url) // usually index 1 is page=1
const prevPageUrl = computed(() => links.value[currentIndex.value - 1]?.url)
const nextPageUrl = computed(() => links.value[currentIndex.value + 1]?.url)
const lastPageUrl = computed(() => links.value[links.value.length - 2]?.url) // last item is "Next »", second-last is last numbered
const isFirstPage = computed(() => links.value[currentIndex.value]?.label === '1')
const isLastPage = computed(
() => links.value[currentIndex.value]?.label === props.data.meta.last_page
)
</script> </script>

1
package-lock.json generated
View File

@ -34,6 +34,7 @@
"@adonisjs/eslint-config": "^2.0.0", "@adonisjs/eslint-config": "^2.0.0",
"@adonisjs/prettier-config": "^1.4.4", "@adonisjs/prettier-config": "^1.4.4",
"@adonisjs/tsconfig": "^1.4.0", "@adonisjs/tsconfig": "^1.4.0",
"@faker-js/faker": "^9.8.0",
"@japa/assert": "^4.0.1", "@japa/assert": "^4.0.1",
"@japa/plugin-adonisjs": "^4.0.0", "@japa/plugin-adonisjs": "^4.0.0",
"@japa/runner": "^4.2.0", "@japa/runner": "^4.2.0",

View File

@ -36,6 +36,7 @@
"@adonisjs/eslint-config": "^2.0.0", "@adonisjs/eslint-config": "^2.0.0",
"@adonisjs/prettier-config": "^1.4.4", "@adonisjs/prettier-config": "^1.4.4",
"@adonisjs/tsconfig": "^1.4.0", "@adonisjs/tsconfig": "^1.4.0",
"@faker-js/faker": "^9.8.0",
"@japa/assert": "^4.0.1", "@japa/assert": "^4.0.1",
"@japa/plugin-adonisjs": "^4.0.0", "@japa/plugin-adonisjs": "^4.0.0",
"@japa/runner": "^4.2.0", "@japa/runner": "^4.2.0",

View File

@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1" /> <meta name="viewport" content="width=device-width, initial-scale=1" />
<title inertia> <title inertia>
AdonisJS x Inertia x VueJS Sentry Toolkit
</title> </title>
<link rel="preconnect" href="https://fonts.bunny.net" /> <link rel="preconnect" href="https://fonts.bunny.net" />

View File

@ -34,5 +34,7 @@ export default await Env.create(new URL('../', import.meta.url), {
PG_USER: Env.schema.string(), PG_USER: Env.schema.string(),
PG_PASSWORD: Env.schema.string(), PG_PASSWORD: Env.schema.string(),
WEBHOOK_URL: Env.schema.string() WEBHOOK_URL: Env.schema.string.optional(),
QUERY_FILTER: Env.schema.string(),
}) })

View File

@ -9,8 +9,7 @@
import ReplaysController from '#controllers/replays_controller' import ReplaysController from '#controllers/replays_controller'
import router from '@adonisjs/core/services/router' import router from '@adonisjs/core/services/router'
router.on('/').renderInertia('home') router.get('/', [ReplaysController, 'home'])
router.get('/replays', [ReplaysController, 'index']) router.get('/replays', [ReplaysController, 'index'])
router.get('/list', [ReplaysController, 'list' router.get('/stats', [ReplaysController, 'stats'])
]) router.get('/faker', [ReplaysController, 'faker'])
router.get('/search', [ReplaysController, 'search'])