Compare commits

...

18 Commits

Author SHA1 Message Date
2ad1231d83 Add healthcheck and update node version 2025-06-04 21:06:39 -04:00
9a712a096b Fix typo 2025-06-04 21:06:23 -04:00
c20536cd96 Fix database connection in docker 2025-06-04 20:27:13 -04:00
a7bdeb6248 Fix formatting 2025-05-28 15:51:16 -04:00
112c69bb92 Add faker endpoint for local dev 2025-05-28 15:51:05 -04:00
d2b4070206 Remove unused route 2025-05-26 16:16:24 -04:00
e1e9f6c10a Merge branch 'master' of https://git.hackanooga.com/mikeconrad/sentry-scrape 2025-05-22 17:04:01 -04:00
36be410d38 Minor tweaks and get started on docs 2025-05-22 17:03:35 -04:00
c88b7d762e Add configurable query parameter 2025-05-22 16:25:51 -04:00
47a520e945 Fix formatting 2025-05-21 16:36:54 -04:00
f5aceea6fa Refactor dockerfile update compose example 2025-05-21 16:35:00 -04:00
3c44fcc062 Reduce controller logic scope 2025-05-20 14:00:44 -04:00
5df94eaafa Move stats logic to model and fix caching 2025-05-20 13:32:47 -04:00
5e8f7e6005 Update frontend viewS 2025-05-20 11:23:35 -04:00
4962399fcf Caching in progress 2025-05-20 11:23:17 -04:00
274fcf9a5c Add persisted db volume 2025-05-20 10:14:54 -04:00
aa70f99727 Fix traefik routing with explicit network 2025-05-20 10:08:26 -04:00
43f165c782 Fix traefik routing with multiple networks 2025-05-20 09:00:39 -04:00
23 changed files with 555 additions and 277 deletions

View File

@ -1,12 +1,13 @@
TZ=UTC
PORT=3333
HOST=localhost
HOST=0.0.0.0
LOG_LEVEL=info
APP_KEY=
APP_KEY=sMoYEqixvC3sgJO4WM9ej9ctlcVtAdCE
NODE_ENV=development
SESSION_DRIVER=cookie
PG_USER=postgres
PG_PORT=5432
PG_HOST=localhost
PG_HOST=db
PG_PASSWORD=password
SENTRY_TOKEN=
SENTRY_ORG=
@ -14,3 +15,4 @@ REDIS_HOST=sentry-redis-1
REDIS_PORT=6379
REDIS_PASSWORD=
WEBHOOK_URL=
QUERY_FILTER='!user.email:*@mailinator.com !user.email:*@example.com'

1
.gitignore vendored
View File

@ -23,3 +23,4 @@ yarn-error.log
# Platform specific
.DS_Store
*compose-prod.yml

View File

@ -1,4 +1,6 @@
FROM node:20.12.2-alpine3.18 AS base
FROM node:lts-alpine3.22 AS base
HEALTHCHECK --interval=5s --timeout=10s --start-period=5s --retries=5 \
CMD sh -c 'wget --no-verbose --tries=1 --spider http://127.0.0.1:3333 || exit 1'
# All deps stage
FROM base AS deps
@ -6,11 +8,10 @@ WORKDIR /app
ADD package.json package-lock.json ./
RUN npm ci
FROM node:22 AS dev-deps
FROM deps AS develop
WORKDIR /app
ADD package.json package-lock.json ./
COPY --from=deps /app/node_modules /app/node_modules
ENV NODE_ENV=development
RUN npm ci
EXPOSE 3333
# Production only deps stage
@ -33,4 +34,5 @@ WORKDIR /app
COPY --from=production-deps /app/node_modules /app/node_modules
COPY --from=build /app/build /app
EXPOSE 8080
CMD ["node", "./bin/server.js"]

32
README.md Normal file
View File

@ -0,0 +1,32 @@
# Sentry Toolkit
This project was born out of a simple marketing request. Basically along the lines of "how can we track user engagement in our CRM?", to which I answered "We already use Sentry for Session recording, we can pull that data from the API, aggregate it and push it to the CRM." Hence this project. It is currently pretty simple and includes an API as well as basic web ui.
## Tech Stack
- [AdonisJS](https://adonisjs.com): I decided to use the wonderful AdonisJS framework for this project. Overkill? Probably, but it has a lot of niceties built in and I didn't want to reinvent the wheel for this simple project. I also wanted to play around with InertiaJS which comes included.
- [Docker](https://docker.com) - All services have been containerized for convenience of developing, testing and deploying. A `compose.yml` and `compose.override.yml` are included for testing and developing locally.
- Redis - Some basic caching because why not?
- PostgreSQL - Useful for storing historical session data.
- Traefik - Reverse proxy/ingress controller, provided for convenient development and local testing.
- Grafana - (Optional) For building pretty dashboards.
## Getting started
```shell
$ cp .env.example .env.develop
# Add/edit values in .env.develop as needed
# The WEBHOOK_URL is not strictly necessary for basic functionality.
# Tested on Linux, I have not had the pleasure of setting up Traefik on Windows/Mac
# recently so suggestions welcome. Also you may need `sudo` depending on how your
# Docker environment is set up.
$ docker compose up -d
```
Once all of the containers come up, you should be able to access the UI/API on [http://sentry.docker.localhost]() (Docker compose magic.) The database migrations should automatically run when you start with `docker compose` but if you are running the backend with node instead you will need to run `node ace migration:run` after starting the app for the first time.
The main page will list any Replay sessions stored in the database.
![](./docs/assets/homepage.jpg)
[http://sentry.docker.localhost/replays]() will fetch session data from Sentry and store it in the database. It will also return the results as JSON.

View File

@ -53,7 +53,7 @@ export default defineConfig({
() => import('@adonisjs/lucid/database_provider'),
() => import('@adonisjs/auth/auth_provider'),
() => import('@adonisjs/inertia/inertia_provider'),
() => import('@adonisjs/redis/redis_provider')
() => import('@adonisjs/redis/redis_provider'),
],
/*

52
app/Helpers/Replays.ts Normal file
View File

@ -0,0 +1,52 @@
import Replay from '#models/replay'
import { parseSentryLinkHeader, SentryPagination } from './Sentry.js'
import env from '#start/env'
// Running total of rows upserted; written by fetchBatch as it pages through results.
let recordsUpdated = 0
const SENTRY_TOKEN = env.get('SENTRY_TOKEN')
// Minimal shape of a Sentry list response — only `data` is consumed here.
interface ApiResponse<T> {
data: T
// optionally, you can define `meta`, `errors`, etc. if your API returns them
}
/**
 * Fetch replay records from the Sentry API, following pagination via the
 * `link` response header, and upsert each page into the database.
 *
 * Iterative rather than recursive so deep pagination cannot grow the call
 * stack, and the running total is kept in a local so successive calls do not
 * inherit counts from earlier runs (the module-level accumulator was never
 * reset between invocations).
 *
 * @param url first page URL (Sentry replays endpoint, or the local /faker stub)
 * @returns `{ recordsUpdated }` on success, or `{ error }` when the very first
 *          response is missing its `link` header (matches the old contract).
 */
export async function fetchBatch(url: string) {
  const options: RequestInit = {
    headers: {
      Authorization: `Bearer ${SENTRY_TOKEN}`,
    },
  }
  let total = 0
  let firstPage = true
  let nextUrl: string | null = url
  while (nextUrl) {
    const req = await fetch(nextUrl, options)
    if (!req.ok) {
      throw new Error(`Request failed with status ${req.status}`)
    }
    const resp = (await req.json()) as ApiResponse<Replay[]>
    const cleanedData = resp.data.map((record) => sanitizeInput(record, Replay.allowedFields))
    const updated = await Replay.updateOrCreateMany('id', cleanedData)
    total += updated.length
    const linkHeader = req.headers.get('link')
    if (!linkHeader) {
      // Only the first page treats a missing link header as an error;
      // on later pages we simply stop paginating (same as the old code,
      // where inner recursive return values were discarded).
      if (firstPage) {
        return { error: 'link header missing from Sentry API response' }
      }
      break
    }
    const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
    nextUrl = pagination.hasNextResults ? pagination.next : null
    if (nextUrl) {
      console.log('fetching', nextUrl)
    }
    firstPage = false
  }
  console.log('no more results')
  // Keep the module-level counter in sync for any external readers.
  recordsUpdated = total
  return { recordsUpdated: total }
}
/**
 * Build a shallow copy of `data` containing only the keys listed in
 * `allowedFields`; every other property is dropped.
 */
function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
  const cleaned: Record<string, any> = {}
  for (const field of allowedFields) {
    if (field in data) {
      cleaned[field] = data[field]
    }
  }
  return cleaned
}

27
app/Helpers/Sentry.ts Normal file
View File

@ -0,0 +1,27 @@
/** Pagination state extracted from Sentry's `link` response header. */
export interface SentryPagination {
  previous: string
  hasPreviousResults: boolean
  hasNextResults: boolean
  next: string
}

/**
 * Parse Sentry's `link` response header into pagination state.
 *
 * The header looks like:
 *   <url>; rel="previous"; results="false"; cursor="0:0:1", <url>; rel="next"; results="true"; cursor="0:100:0"
 *
 * Entries that do not match the expected shape are ignored. All fields are
 * initialized to empty/false so callers never see `undefined` — the previous
 * `{} as SentryPagination` assertion left every field undefined whenever a
 * rel was absent from the header.
 */
export function parseSentryLinkHeader(header: string): SentryPagination {
  const result: SentryPagination = {
    previous: '',
    hasPreviousResults: false,
    next: '',
    hasNextResults: false,
  }
  for (const link of header.split(',')) {
    const match = link
      .trim()
      .match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
    if (!match) continue
    const [, url, rel, results] = match
    if (rel === 'previous') {
      result.previous = url
      result.hasPreviousResults = results === 'true'
    } else if (rel === 'next') {
      result.next = url
      result.hasNextResults = results === 'true'
    }
  }
  return result
}

21
app/Helpers/Webhook.ts Normal file
View File

@ -0,0 +1,21 @@
import env from '#start/env'
/**
 * POST aggregated replay stats to the configured webhook, if one is set.
 *
 * Failures are logged and swallowed on purpose: the webhook is a best-effort
 * side channel and must never break the API response that triggered it.
 */
export async function sendDataToWebhook(responseData: {
  version: number
  updatedAt: Date
  numberOfRecords: number
  data: unknown
}) {
  // WEBHOOK_URL is optional in the env schema; skip quietly when unset
  // instead of handing an undefined URL to fetch().
  const webhookUrl = env.get('WEBHOOK_URL')
  if (!webhookUrl) {
    console.log('WEBHOOK_URL not configured, skipping webhook sync')
    return
  }
  try {
    console.log('syncing to webhook')
    const res = await fetch(webhookUrl, {
      headers: {
        'content-type': 'application/json',
      },
      method: 'POST',
      body: JSON.stringify(responseData),
    })
    if (!res.ok) {
      console.error(`webhook responded with status ${res.status}`)
    }
  } catch (e) {
    console.error('error sending webhook data', e)
  }
}

View File

@ -1,95 +1,51 @@
import Replay from '#models/replay'
import env from '#start/env'
import type { HttpContext } from '@adonisjs/core/http'
import db from '@adonisjs/lucid/services/db'
const SENTRY_TOKEN = env.get('SENTRY_TOKEN')
const SENTRY_ORG = env.get('SENTRY_ORG')
let recordsUpdated = 0
import redis from '@adonisjs/redis/services/main'
import { fetchBatch } from '../Helpers/Replays.js'
import { sendDataToWebhook } from '../Helpers/Webhook.js'
import { faker } from '@faker-js/faker'
interface ApiResponse<T> {
data: T;
// optionally, you can define `meta`, `errors`, etc. if your API returns them
}
interface SentryPagination {
previous: string;
hasPreviousResults: boolean;
hasNextResults: boolean;
next: string
}
export default class ReplaysController {
public async search({ response }: HttpContext) {
let results = await db.rawQuery(`
SELECT
u.display_name,
u.sessions,
u.total_time_seconds,
u.total_time_readable,
u.average_session_time_readable,
u.average_time_seconds,
r.id AS last_session_id,
r.finished_at AS last_session_time
FROM (
-- Aggregate sessions in the last 30 days
SELECT
"user" ->> 'display_name' AS display_name,
COUNT(duration) AS sessions,
SUM(duration) AS total_time_seconds,
AVG(duration) AS average_time_seconds,
CONCAT(
FLOOR(SUM(duration) / 86400), 'd ',
FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
) AS total_time_readable,
CONCAT(
FLOOR(COUNT(duration) / 86400), 'd ',
FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
) AS average_session_time_readable
FROM
replays
WHERE
finished_at >= NOW() - INTERVAL '30 days'
GROUP BY
"user" ->> 'display_name'
) u
-- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
ORDER BY
CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
finished_at DESC
LIMIT 1
) r ON true
ORDER BY
u.total_time_seconds DESC;`
public async faker({ request, response }: HttpContext) {
const { page } = await request.qs()
const sessions = Array.from({ length: 100 }, generateFakeSession)
const nextPage = +page + 1
await response.safeHeader(
'link',
`<http://localhost:3333/faker/?page=${page}>; rel="previous"; results="true"; cursor="0:1100:1", <http://localhost:3333/faker/?page=${nextPage}>; rel="next"; results="${page == 10 ? 'false' : 'true'}"; cursor="0:${page * 100}:0"`
)
try {
return { data: sessions, count: sessions.length, page: page }
}
public async stats({ request, response }: HttpContext) {
const { sendToWebhook } = request.qs()
const latestVersion = await redis.get(`replays:stats:latest_version`)
let results
if (!latestVersion) {
console.log('Cache miss')
results = await Replay.updateReplayStats()
} else {
console.log('cache hit')
let data = await redis.get(`replays:stats:version:${latestVersion}:results`)
if (data) {
results = JSON.parse(data)
}
}
await fetch(env.get('WEBHOOK_URL'),
{
headers:
{
'content-type': 'application/json'
},
method: 'POST',
body: JSON.stringify(results.rows)
let responseData = {
version: results.version,
updatedAt: results.updatedAt,
numberOfRecords: results.rows.length,
data: results.rows,
}
)
} catch(e) {
console.error('error sending webhook data', e)
if (sendToWebhook) {
await sendDataToWebhook(responseData)
}
response.json(results.rows)
return response.json(responseData)
}
public async list({ request, inertia }: HttpContext) {
public async home({ request, inertia }: HttpContext) {
const page = request.input('page', 1)
const perPage = 20
const cacheKey = `replays:page:${page}`
@ -98,16 +54,16 @@ ORDER BY
let paginated, meta, replays
if (data) {
({ paginated, meta, replays } = JSON.parse(data))
;({ paginated, meta, replays } = JSON.parse(data))
} else {
paginated = await Replay.query().paginate(page, perPage)
paginated.baseUrl('/list')
paginated.baseUrl('/')
const json = paginated.toJSON()
meta = {
...json.meta,
links: buildPaginationLinks(json.meta)
links: buildPaginationLinks(json.meta),
}
replays = json.data
@ -118,107 +74,53 @@ ORDER BY
return inertia.render('Replays/Index', {
data: {
replays,
meta
}
meta,
},
})
}
async index({ request, response }: HttpContext) {
const { statsPeriod, start, end } = request.qs()
recordsUpdated = 0
let queryString: string = '?statsPeriod=24h' // Default in case none is provided
if (statsPeriod) {
queryString = `?statsPeriod=${statsPeriod}`
} else if (start && end) {
queryString = `?start=${start}&end=${end}`
}
const replays = await fetchBatch(`https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}`)
return response.json(replays)
}
const queryFilter = env.get('QUERY_FILTER')
const baseUrl =
env.get('NODE_ENV') == 'production'
? `https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}&field=id&field=user&field=duration&field=started_at&field=finished_at&query=${encodeURIComponent(queryFilter)}`
: 'http://localhost:3333/faker?page=1'
console.log('base', baseUrl)
await fetchBatch(baseUrl)
}
let queryResults = await Replay.updateReplayStats()
async function fetchBatch(url: string) {
const options: RequestInit = {
headers: {
Authorization: `Bearer ${SENTRY_TOKEN}`
}
}
const req = await fetch(url, options)
if (!req.ok) {
throw new Error(`Request failed with status ${req.status}`);
}
const resp = await req.json() as ApiResponse<Replay[]>;
const replays = resp.data;
const headers = req.headers
const cleanedData = replays.map(record => sanitizeInput(record, Replay.allowedFields))
let updated = await Replay.updateOrCreateMany('id', cleanedData)
recordsUpdated = recordsUpdated + updated.length
const linkHeader = headers.get('link')
if (!linkHeader) {
return { error: 'link header missing from Sentry API response' }
}
const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
if (pagination.hasNextResults == true) {
console.log('fetching', pagination.next)
await fetchBatch(pagination.next)
}
console.log('no more results')
return { recordsUpdated }
}
function parseSentryLinkHeader(header: string): SentryPagination {
const links = header.split(',').map(part => part.trim())
let result = {} as SentryPagination
for (const link of links) {
const match = link.match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
if (!match) continue
const [, url, rel, results] = match
if (rel === 'previous') {
result.previous = url
result.hasPreviousResults = results === 'true'
} else if (rel === 'next') {
result.next = url
result.hasNextResults = results === 'true'
return response.json({ version: queryResults.latestVersion, ...queryResults })
}
}
return result
}
function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
return allowedFields.reduce((acc, key) => {
if (key in data) acc[key] = data[key]
return acc
}, {} as Record<string, any>)
}
function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number; currentPage: number; nextPageUrl: string }) {
function buildPaginationLinks(meta: {
previousPageUrl: string
lastPage: number
currentPage: number
nextPageUrl: string
}) {
const links = []
// Previous
links.push({
url: meta.previousPageUrl,
label: '&laquo; Prev',
active: false
active: false,
})
for (let page = 1; page <= meta.lastPage; page++) {
links.push({
url: `/list?page=${page}`,
url: `/?page=${page}`,
label: page.toString(),
active: page === meta.currentPage
active: page === meta.currentPage,
})
}
@ -226,8 +128,68 @@ function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number;
links.push({
url: meta.nextPageUrl,
label: 'Next &raquo;',
active: false
active: false,
})
return links
}
/**
 * Generate one fake replay-session record for local development.
 * The field layout mirrors what the Sentry replays API returns, so the
 * /faker endpoint can stand in for Sentry when not running in production.
 */
function generateFakeSession() {
const uuid = faker.string.uuid()
const browserName = faker.helpers.arrayElement(['Chrome', 'Firefox', 'Safari', 'Edge', 'Brave'])
const deviceBrand = faker.helpers.arrayElement(['Apple', 'Samsung', 'Google'])
const osName = faker.helpers.arrayElement(['iOS', 'Android', 'Windows', 'macOS'])
const platform = faker.helpers.arrayElement(['Sentry', 'Datadog', 'New Relic', 'Rollbar'])
// A random moment within the past hour; finished_at below is picked between
// this instant and "now" so sessions always look recent.
const finishedAt = new Date(Date.now() - faker.number.int({ min: 0, max: 60 * 60 * 1000 }))
const displayName = faker.internet.email()
return {
activity: faker.number.int({ min: 1, max: 10 }),
browser: {
name: browserName,
version: faker.system.semver(),
},
count_dead_clicks: faker.number.int({ min: 0, max: 10 }),
count_rage_clicks: faker.number.int({ min: 0, max: 5 }),
count_errors: faker.number.int({ min: 0, max: 5 }),
count_segments: faker.number.int({ min: 0, max: 3 }),
count_urls: faker.number.int({ min: 1, max: 3 }),
device: {
brand: deviceBrand,
// Apple devices report the iPhone family; other brands reuse the brand name.
family: deviceBrand === 'Apple' ? 'iPhone' : deviceBrand,
model: faker.string.numeric({ length: 2 }),
name: `${deviceBrand} ${faker.string.alphanumeric({ length: 3 })}`,
},
dist: null,
duration: faker.number.int({ min: 100, max: 1000 }),
environment: faker.helpers.arrayElement(['production', 'staging', 'development']),
// error/trace ids reuse the session uuid, giving one linked error per session.
error_ids: [uuid],
finished_at: faker.date.between({ from: finishedAt, to: new Date() }).toISOString(),
has_viewed: faker.datatype.boolean(),
id: uuid,
is_archived: faker.datatype.boolean() ? null : false,
os: {
name: osName,
version: `${faker.number.int({ min: 10, max: 17 })}.${faker.number.int({ min: 0, max: 5 })}`,
},
platform: platform,
project_id: faker.string.numeric({ length: 6 }),
releases: [`version@${faker.system.semver()}`],
sdk: {
name: faker.hacker.noun(),
version: faker.system.semver(),
},
started_at: faker.date.recent().toISOString(),
tags: {
hello: ['world', faker.person.fullName()],
},
trace_ids: [uuid],
urls: [faker.internet.url()],
user: {
// email mirrors display_name (a fake email address).
display_name: displayName,
email: displayName,
id: faker.string.numeric({ length: 8 }),
ip: faker.internet.ip(),
username: faker.internet.username(),
},
}
}

View File

@ -1,7 +1,85 @@
import { DateTime } from 'luxon'
import { BaseModel, column } from '@adonisjs/lucid/orm'
import db from '@adonisjs/lucid/services/db'
import redis from '@adonisjs/redis/services/main'
export default class Replay extends BaseModel {
/**
 * Recompute per-user session aggregates over the last 30 days and cache the
 * result in Redis under a monotonically increasing version number.
 *
 * Returns the raw query result decorated with `version` and `updatedAt`;
 * the same payload is stored at `replays:stats:version:<n>:results` and the
 * newest version number at `replays:stats:latest_version`.
 * NOTE(review): the SQL excludes display names matching
 * 'e2etesting|@paragontruss.com' — confirm these project-specific
 * exclusions belong hard-coded in the model layer.
 */
public static async updateReplayStats() {
let results = await db.rawQuery(`
SELECT
u.display_name,
u.sessions,
u.total_time_seconds,
u.total_time_readable,
u.average_session_time_readable,
u.average_time_seconds,
r.id AS last_session_id,
r.finished_at AS last_session_time,
o.id AS oldest_session_id,
o.finished_at AS oldest_session_time
FROM (
-- Aggregate sessions in the last 30 days
SELECT
"user" ->> 'display_name' AS display_name,
COUNT(duration) AS sessions,
SUM(duration) AS total_time_seconds,
AVG(duration) AS average_time_seconds,
CONCAT(
FLOOR(SUM(duration) / 86400), 'd ',
FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
) AS total_time_readable,
CONCAT(
FLOOR(COUNT(duration) / 86400), 'd ',
FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
) AS average_session_time_readable
FROM
replays
WHERE
finished_at >= NOW() - INTERVAL '30 days'
AND "user" ->> 'display_name' LIKE '%@%'
AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
GROUP BY
"user" ->> 'display_name'
) u
-- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
AND "user" ->> 'display_name' LIKE '%@%'
AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
ORDER BY
CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
finished_at DESC
LIMIT 1
) r ON true
-- LATERAL JOIN to get the oldest session
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
AND "user" ->> 'display_name' LIKE '%@%'
AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
ORDER BY finished_at ASC
LIMIT 1
) o ON true
ORDER BY
u.total_time_seconds DESC;
`)
// Bump the global version so readers can discover the newest snapshot.
const updatedVersion = await redis.incr('replays:stats:latest_version')
results.version = updatedVersion
results.updatedAt = Date.now()
// Cache the full decorated result under this version for later cache hits.
await redis.set(`replays:stats:version:${updatedVersion}:results`, JSON.stringify(results))
return results
}
@column({ isPrimary: true })
declare id: string
@ -12,14 +90,14 @@ export default class Replay extends BaseModel {
prepare: (value) => {
// The values from sentry are just arrays so convert them to json
return JSON.stringify(value)
}
},
})
declare trace_ids: string[]
@column({
prepare: (value) => {
return JSON.stringify(value)
}
},
})
declare error_ids: string[]
@ -30,26 +108,22 @@ export default class Replay extends BaseModel {
prepare: (value) => {
// The values from sentry are just arrays so convert them to json
return JSON.stringify(value)
}
},
})
declare tags: string[]
@column()
declare user: string[]
@column()
declare sdk: any
@column()
declare os: any
@column()
declare browser: any
@column()
declare device: any
@ -59,21 +133,19 @@ export default class Replay extends BaseModel {
@column()
declare is_archived: boolean | null
@column({
prepare: (value) => {
// The values from sentry are just arrays so convert them to json
return JSON.stringify(value)
}
},
})
declare urls: any
@column({
prepare: (value) => {
// The values from sentry are just arrays so convert them to json
return JSON.stringify(value)
}
},
})
declare clicks: any
@ -110,12 +182,11 @@ export default class Replay extends BaseModel {
@column()
declare platform: string | null
@column({
prepare: (value) => {
// The values from sentry are just arrays so convert them to json
return JSON.stringify(value)
}
},
})
declare releases: any

16
compose.override.yml Normal file
View File

@ -0,0 +1,16 @@
---
# Local development overrides — `docker compose up` merges this file with
# compose.yml automatically. Mounts the source tree for live reload and runs
# migrations before starting the dev server.
services:
  scraper:
    build:
      context: .
      # Dev stage of the Dockerfile (full npm ci, watch mode).
      target: develop
    env_file: .env.develop
    volumes:
      - ./:/app
      # Keep the container's node_modules out of the host bind mount.
      - node_modules:/app/node_modules
    command: /bin/sh -c "node ace migration:run --force && node ace serve --watch"
    depends_on:
      db:
        # Wait for the postgres healthcheck before migrating.
        condition: service_healthy
volumes:
  node_modules: {}

77
compose.yml Normal file
View File

@ -0,0 +1,77 @@
---
name: sentry
services:
  # Traefik ingress: routes the *.docker.localhost hostnames declared in the
  # service labels below.
  reverse-proxy:
    image: traefik:latest
    command: --api.insecure=true --providers.docker
    ports:
      # HTTP entrypoint.
      - 80:80
      # Traefik dashboard (insecure API — local use only).
      - 8080:8080
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - traefik
  # The AdonisJS app; its build target comes from compose.override.yml in dev.
  scraper:
    labels:
      - 'traefik.enable=true'
      # Pin routing to the traefik network so Traefik does not pick an
      # unreachable container IP from the redis/database networks.
      - 'traefik.docker.network=sentry_traefik'
      - 'traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)'
      - 'traefik.http.services.scraper.loadbalancer.server.port=3333'
      - 'traefik.http.routers.scraper.entrypoints=http'
      - 'traefik.http.routers.scraper.service=scraper'
    networks:
      - traefik
      - redis
      - database
    depends_on:
      db:
        condition: service_healthy
  db:
    image: postgres:16
    environment:
      - POSTGRES_PASSWORD=password
    healthcheck:
      test: ['CMD-SHELL', 'pg_isready', '-d', 'postgres']
      interval: 5s
      timeout: 60s
      retries: 5
      start_period: 5s
    networks:
      - database
    volumes:
      # Persist data across container restarts.
      - pg_data:/var/lib/postgresql/data
  # Optional dashboards at grafana.docker.localhost.
  grafana:
    image: grafana/grafana:latest
    labels:
      - 'traefik.enable=true'
      - 'traefik.docker.network=sentry_traefik'
      - 'traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)'
      - 'traefik.http.routers.grafana.entrypoints=http'
      - 'traefik.http.services.grafana.loadbalancer.server.port=3000'
      - 'traefik.http.routers.grafana.service=grafana'
    networks:
      - traefik
      - database
    healthcheck:
      test:
        [
          'CMD-SHELL',
          'wget --no-verbose --tries=1 --spider http://localhost:3000/api/health || exit 1',
        ]
      interval: 10s
      timeout: 30s
      retries: 5
      start_period: 30s
  redis:
    image: redis:latest
    networks:
      - redis
# Separate bridge networks isolate the datastores from the ingress network.
networks:
  traefik:
    driver: bridge
  database:
    driver: bridge
  redis:
    driver: bridge
volumes:
  pg_data: {}

View File

@ -1,16 +0,0 @@
name: sentry
services:
scraper:
build:
context: .
target: dev-deps
env_file: .env.develop
volumes:
- ./:/app
- node_modules:/app/node_modules
command: node ace serve --watch
depends_on:
db:
condition: service_healthy
volumes:
node_modules: {}

View File

@ -1,50 +0,0 @@
---
name: sentry
services:
reverse-proxy:
image: traefik:latest
command: --api.insecure=true --providers.docker
ports:
- 80:80
- 8080:8080
volumes:
- /var/run/docker.sock:/var/run/docker.sock
networks:
- traefik
scraper:
labels:
- "traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)"
networks:
- traefik
- redis
- database
db:
image: postgres:16
environment:
- POSTGRES_PASSWORD=password
healthcheck:
test: ["CMD-SHELL", "pg_isready", "-d", "postgres"]
interval: 5s
timeout: 60s
retries: 5
start_period: 5s
networks:
- database
grafana:
image: grafana/grafana:latest
labels:
- "traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)"
networks:
- traefik
- database
redis:
image: redis:latest
networks:
- redis
networks:
traefik:
driver: bridge
database:
driver: bridge
redis:
driver: bridge

BIN
docs/assets/homepage.jpg Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 26 KiB

View File

@ -4,10 +4,10 @@
import '../css/app.css'
import { createSSRApp, h } from 'vue'
import type { DefineComponent } from 'vue'
import { createInertiaApp } from '@inertiajs/vue3'
import { createInertiaApp, Link } from '@inertiajs/vue3'
import { resolvePageComponent } from '@adonisjs/inertia/helpers'
const appName = import.meta.env.VITE_APP_NAME || 'AdonisJS'
Vue.component('inertia-link', Link)
createInertiaApp({
progress: { color: '#5468FF' },

View File

@ -1,5 +1,5 @@
<template>
<div>
<div class="m-5">
<h1 class="text-2xl font-bold mb-4">Replays</h1>
<table class="w-full border text-left">
@ -7,37 +7,115 @@
<tr class="bg-gray-100">
<th class="p-2">ID</th>
<th class="p-2">Email</th>
<th class="p-2">Started</th>
<th class="p-2">Date</th>
<th class="p-2">Location</th>
</tr>
</thead>
<tbody>
<tr v-for="replay in data.replays" :key="replay.id" class="border-t">
<td class="p-2">{{ replay.id }}</td>
<td class="p-2">{{ replay.user.email ?? replay.user.display_name }}</td>
<td class="p-2">{{ replay.started_at }}</td>
<td class="p-2">{{ replay.finished_at }}</td>
<td class="p-2">
{{
replay.user.geo
? `${replay.user.geo.city} ${replay.user.geo.subdivision}, ${replay.user.geo.region}`
: 'unknown'
}}
</td>
</tr>
</tbody>
</table>
<!-- Pagination -->
<div class="mt-4 flex space-x-2">
<template v-for="link in data.meta.links" :key="link.label">
<div
class="mt-4 flex flex-wrap items-center gap-2"
v-if="data.meta && data.meta.links && data.meta.links.length > 1"
>
<!-- First -->
<Link
v-if="firstPageUrl && !isFirstPage"
:href="firstPageUrl"
class="px-3 py-1 border rounded text-sm"
>
« First
</Link>
<!-- Previous -->
<Link v-if="prevPageUrl" :href="prevPageUrl" class="px-3 py-1 border rounded text-sm">
Prev
</Link>
<!-- Page Numbers (windowed) -->
<template v-for="link in paginatedLinks" :key="link.label">
<component
:is="link.url ? Link : 'span'"
:href="link.url"
class="px-2 py-1 border rounded"
:class="{ 'font-bold bg-gray-200': link.active, 'text-gray-400': !link.url }"
/>
class="px-3 py-1 border rounded text-sm"
:class="{
'font-bold bg-gray-300': link.active,
'text-gray-400 cursor-not-allowed': !link.url,
}"
>
<span v-html="link.label" />
</component>
</template>
<!-- Next -->
<Link v-if="nextPageUrl" :href="nextPageUrl" class="px-3 py-1 border rounded text-sm">
Next
</Link>
<!-- Last -->
<Link
v-if="lastPageUrl && !isLastPage"
:href="lastPageUrl"
class="px-3 py-1 border rounded text-sm"
>
Last »
</Link>
</div>
</div>
</template>
<script setup>
import { Link, usePage } from '@inertiajs/vue3'
import { computed } from 'vue'
import { Link } from '@inertiajs/vue3'
const props = defineProps({
data: Object
data: Object,
})
// Core pagination values
const links = computed(() => props.data.meta.links || [])
const currentIndex = computed(() => links.value.findIndex((link) => link.active))
const maxVisible = 10
const half = Math.floor(maxVisible / 2)
// Window of up to `maxVisible` page links, centred on the active page and
// clamped so the window never runs past either end of the list.
const paginatedLinks = computed(() => {
  const all = links.value
  if (all.length <= maxVisible) return all
  const start = Math.min(Math.max(currentIndex.value - half, 0), all.length - maxVisible)
  return all.slice(start, start + maxVisible)
})
// Navigation links
const firstPageUrl = computed(() => links.value[1]?.url) // usually index 1 is page=1
const prevPageUrl = computed(() => links.value[currentIndex.value - 1]?.url)
const nextPageUrl = computed(() => links.value[currentIndex.value + 1]?.url)
const lastPageUrl = computed(() => links.value[links.value.length - 2]?.url) // last item is "Next »", second-last is last numbered
// Hide the First/Last shortcut links when already on a boundary page.
const isFirstPage = computed(() => links.value[currentIndex.value]?.label === '1')
// Compare as strings: link.label comes from page.toString() while the meta
// page count is presumably numeric, so the old strict === between a string
// and a number could never be true and "Last" was always shown.
// NOTE(review): confirm the meta key — buildPaginationLinks emits camelCase
// `lastPage`, not `last_page`.
const isLastPage = computed(
  () => links.value[currentIndex.value]?.label === String(props.data.meta.last_page)
)
</script>

1
package-lock.json generated
View File

@ -34,6 +34,7 @@
"@adonisjs/eslint-config": "^2.0.0",
"@adonisjs/prettier-config": "^1.4.4",
"@adonisjs/tsconfig": "^1.4.0",
"@faker-js/faker": "^9.8.0",
"@japa/assert": "^4.0.1",
"@japa/plugin-adonisjs": "^4.0.0",
"@japa/runner": "^4.2.0",

View File

@ -36,6 +36,7 @@
"@adonisjs/eslint-config": "^2.0.0",
"@adonisjs/prettier-config": "^1.4.4",
"@adonisjs/tsconfig": "^1.4.0",
"@faker-js/faker": "^9.8.0",
"@japa/assert": "^4.0.1",
"@japa/plugin-adonisjs": "^4.0.0",
"@japa/runner": "^4.2.0",

View File

@ -6,7 +6,7 @@
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title inertia>
AdonisJS x Inertia x VueJS
Sentry Toolkit
</title>
<link rel="preconnect" href="https://fonts.bunny.net" />

View File

@ -34,5 +34,7 @@ export default await Env.create(new URL('../', import.meta.url), {
PG_USER: Env.schema.string(),
PG_PASSWORD: Env.schema.string(),
WEBHOOK_URL: Env.schema.string()
WEBHOOK_URL: Env.schema.string.optional(),
QUERY_FILTER: Env.schema.string(),
})

View File

@ -9,8 +9,7 @@
import ReplaysController from '#controllers/replays_controller'
import router from '@adonisjs/core/services/router'
router.on('/').renderInertia('home')
router.get('/', [ReplaysController, 'home'])
router.get('/replays', [ReplaysController, 'index'])
router.get('/list', [ReplaysController, 'list'
])
router.get('/search', [ReplaysController, 'search'])
router.get('/stats', [ReplaysController, 'stats'])
router.get('/faker', [ReplaysController, 'faker'])