Compare commits

...

4 Commits

SHA1          Message                                        Date
47a520e945    Fix formatting                                 2025-05-21 16:36:54 -04:00
f5aceea6fa    Refactor dockerfile update compose example     2025-05-21 16:35:00 -04:00
3c44fcc062    Reduce controller logic scope                  2025-05-20 14:00:44 -04:00
5df94eaafa    Move stats logic to model and fix caching      2025-05-20 13:32:47 -04:00
16 changed files with 326 additions and 315 deletions

.gitignore (1 addition)

@@ -23,3 +23,4 @@ yarn-error.log
 # Platform specific
 .DS_Store
+*compose-prod.yml

Dockerfile

@@ -1,4 +1,4 @@
-FROM node:20.12.2-alpine3.18 AS base
+FROM node:22-alpine AS base

 # All deps stage
 FROM base AS deps
@@ -6,11 +6,10 @@ WORKDIR /app
 ADD package.json package-lock.json ./
 RUN npm ci

-FROM node:22 AS dev-deps
+FROM deps AS develop
 WORKDIR /app
-ADD package.json package-lock.json ./
+COPY --from=deps /app/node_modules /app/node_modules
 ENV NODE_ENV=development
-RUN npm ci
 EXPOSE 3333

 # Production only deps stage

adonisrc.ts

@@ -53,7 +53,7 @@ export default defineConfig({
     () => import('@adonisjs/lucid/database_provider'),
     () => import('@adonisjs/auth/auth_provider'),
     () => import('@adonisjs/inertia/inertia_provider'),
-    () => import('@adonisjs/redis/redis_provider')
+    () => import('@adonisjs/redis/redis_provider'),
   ],
   /*

app/Helpers/Replays.ts (new file, 52 lines)

@@ -0,0 +1,52 @@
import Replay from '#models/replay'
import { parseSentryLinkHeader, SentryPagination } from './Sentry.js'
import env from '#start/env'

let recordsUpdated = 0
const SENTRY_TOKEN = env.get('SENTRY_TOKEN')

interface ApiResponse<T> {
  data: T
  // optionally, you can define `meta`, `errors`, etc. if your API returns them
}

export async function fetchBatch(url: string) {
  const options: RequestInit = {
    headers: {
      Authorization: `Bearer ${SENTRY_TOKEN}`,
    },
  }

  const req = await fetch(url, options)
  if (!req.ok) {
    throw new Error(`Request failed with status ${req.status}`)
  }

  const resp = (await req.json()) as ApiResponse<Replay[]>
  const replays = resp.data
  const headers = req.headers

  const cleanedData = replays.map((record) => sanitizeInput(record, Replay.allowedFields))
  let updated = await Replay.updateOrCreateMany('id', cleanedData)
  recordsUpdated = recordsUpdated + updated.length

  const linkHeader = headers.get('link')
  if (!linkHeader) {
    return { error: 'link header missing from Sentry API response' }
  }

  const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
  if (pagination.hasNextResults == true) {
    console.log('fetching', pagination.next)
    await fetchBatch(pagination.next)
  }

  console.log('no more results')
  return { recordsUpdated }
}

function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
  return allowedFields.reduce(
    (acc, key) => {
      if (key in data) acc[key] = data[key]
      return acc
    },
    {} as Record<string, any>
  )
}
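For orientation, a minimal usage sketch of this helper (the org name and time window are illustrative; the endpoint shape mirrors what the controller's index() action below passes in):

import { fetchBatch } from '../Helpers/Replays.js'

// Walks every page of the Sentry replays listing, upserting each batch; resolves with the
// module-level recordsUpdated counter, or an error note if Sentry omits the link header.
const result = await fetchBatch(
  'https://sentry.io/api/0/organizations/my-org/replays/?statsPeriod=24h'
)
console.log(result) // e.g. { recordsUpdated: 123 } or { error: 'link header missing from Sentry API response' }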

app/Helpers/Sentry.ts (new file, 27 lines)

@@ -0,0 +1,27 @@
export interface SentryPagination {
  previous: string
  hasPreviousResults: boolean
  hasNextResults: boolean
  next: string
}

export function parseSentryLinkHeader(header: string): SentryPagination {
  const links = header.split(',').map((part) => part.trim())

  let result = {} as SentryPagination

  for (const link of links) {
    const match = link.match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
    if (!match) continue

    const [, url, rel, results] = match

    if (rel === 'previous') {
      result.previous = url
      result.hasPreviousResults = results === 'true'
    } else if (rel === 'next') {
      result.next = url
      result.hasNextResults = results === 'true'
    }
  }

  return result
}
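The regex above targets the pagination Link header Sentry attaches to list endpoints. A hedged example of the expected shape and how the result feeds back into fetchBatch (URLs and cursor values are illustrative):

import { parseSentryLinkHeader } from './Sentry.js'

// Two comma-separated entries, each carrying rel, results and cursor attributes.
const header =
  '<https://sentry.io/api/0/organizations/my-org/replays/?cursor=0:0:1>; rel="previous"; results="false"; cursor="0:0:1", ' +
  '<https://sentry.io/api/0/organizations/my-org/replays/?cursor=0:100:0>; rel="next"; results="true"; cursor="0:100:0"'

const page = parseSentryLinkHeader(header)
// page.hasNextResults === true and page.next holds the follow-up URL,
// which is what fetchBatch() recurses on until results="false".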

app/Helpers/Webhook.ts (new file, 21 lines)

@@ -0,0 +1,21 @@
import env from '#start/env'

export async function sendDataToWebhook(responseData: {
  version: number
  updatedAt: Date
  numberOfRecords: number
  data: unknown
}) {
  try {
    console.log('syncing to webhook')
    await fetch(env.get('WEBHOOK_URL'), {
      headers: {
        'content-type': 'application/json',
      },
      method: 'POST',
      body: JSON.stringify(responseData),
    })
  } catch (e) {
    console.error('error sending webhook data', e)
  }
}
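A quick usage sketch matching the payload shape the controller assembles below (values are illustrative):

import { sendDataToWebhook } from '../Helpers/Webhook.js'

await sendDataToWebhook({
  version: 7,
  updatedAt: new Date(),
  numberOfRecords: 1,
  data: [{ display_name: 'user@example.com', sessions: 12, total_time_seconds: 5400 }],
})
// Fetch failures are caught and logged inside the helper; the caller is never rejected.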

app/controllers/replays_controller.ts

@@ -1,70 +1,39 @@
 import Replay from '#models/replay'
 import env from '#start/env'
 import type { HttpContext } from '@adonisjs/core/http'
-import db from '@adonisjs/lucid/services/db'
-const SENTRY_TOKEN = env.get('SENTRY_TOKEN')
 const SENTRY_ORG = env.get('SENTRY_ORG')
-let recordsUpdated = 0
 import redis from '@adonisjs/redis/services/main'
-interface ApiResponse<T> {
-  data: T;
-  // optionally, you can define `meta`, `errors`, etc. if your API returns them
-}
-interface SentryPagination {
-  previous: string;
-  hasPreviousResults: boolean;
-  hasNextResults: boolean;
-  next: string
-}
+import { fetchBatch } from '../Helpers/Replays.js'
+import { sendDataToWebhook } from '../Helpers/Webhook.js'
 export default class ReplaysController {
   public async stats({ request, response }: HttpContext) {
-    const {sendToWebhook} = request.qs()
-    const cacheKey = `replays:sync:latest_version`
-    const latestFetchVersion = await redis.get(`replays:fetch:latest_version`)
-    const latestQueryVersion = await redis.get(`replays:stats:latest_version`)
-    if (latestFetchVersion == latestQueryVersion) {
+    const { sendToWebhook } = request.qs()
+    const latestVersion = await redis.get(`replays:stats:latest_version`)
     let results
-      results = await redis.get(`replays:sync:version:${latestQueryVersion}:results`)
-      if (!results) {
-        console.log('no data in cache, updating')
-        results = await getResults()
-        await redis.set(`replays:sync:version:${latestQueryVersion}:results`, JSON.stringify(results))
-      }
-      console.log('resultssdsdfds')
-      return response.json(results)
+    if (!latestVersion) {
+      console.log('Cache miss')
+      results = await Replay.updateReplayStats()
     } else {
-      let results = await getResults()
-      console.log('results quer', latestQueryVersion)
-      await redis.set(`replays:stats:version:${latestQueryVersion}:results`, JSON.stringify(results))
-      await redis.set(`replays:stats:latest_version`, latestFetchVersion)
-      await redis.set(`replays:fetch:latest_version`, latestFetchVersion)
-      return response.json(results)
-      if (sendToWebhook) {
-        try {
-          console.log('syncing to webhook')
-          await fetch(env.get('WEBHOOK_URL'),
-            {
-              headers:
-                {
-                  'content-type': 'application/json'
-                },
-              method: 'POST',
-              body: JSON.stringify(results.rows)
-            }
-          )
-        } catch(e) {
-          console.error('error sending webhook data', e)
-        }
+      console.log('cache hit')
+      let data = await redis.get(`replays:stats:version:${latestVersion}:results`)
+      if (data) {
+        results = JSON.parse(data)
       }
     }
-    response.json(results.rows)
+    let responseData = {
+      version: results.version,
+      updatedAt: results.updatedAt,
+      numberOfRecords: results.rows.length,
+      data: results.rows,
     }
+    if (sendToWebhook) {
+      await sendDataToWebhook(responseData)
+    }
+    return response.json(responseData)
+  }
   public async list({ request, inertia }: HttpContext) {
     const page = request.input('page', 1)
     const perPage = 20
@@ -74,7 +43,7 @@ export default class ReplaysController {
     let paginated, meta, replays
     if (data) {
-      ({ paginated, meta, replays } = JSON.parse(data))
+      ;({ paginated, meta, replays } = JSON.parse(data))
     } else {
       paginated = await Replay.query().paginate(page, perPage)
       paginated.baseUrl('/list')
@@ -83,7 +52,7 @@ export default class ReplaysController {
       meta = {
         ...json.meta,
-        links: buildPaginationLinks(json.meta)
+        links: buildPaginationLinks(json.meta),
       }
       replays = json.data
@@ -94,114 +63,47 @@ export default class ReplaysController {
     return inertia.render('Replays/Index', {
       data: {
         replays,
-        meta
-      }
+        meta,
+      },
     })
   }
   async index({ request, response }: HttpContext) {
     const { statsPeriod, start, end } = request.qs()
-    recordsUpdated = 0
-    let queryString: string = '?statsPeriod=24h'// Default in case none is provided
+    let queryString: string = '?statsPeriod=24h' // Default in case none is provided
     if (statsPeriod) {
       queryString = `?statsPeriod=${statsPeriod}`
     } else if (start && end) {
       queryString = `?start=${start}&end=${end}`
     }
-    const replays = await fetchBatch(`https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}`)
-    let latestVersion = await redis.get(`replays:fetch:latest_version`)
-    if (!latestVersion) {
-      redis.set('replays:fetch:latest_version', 1)
-    } else {
-      redis.set('replays:fetch:latest_version', ++latestVersion)
-    }
-    return response.json(replays)
-  }
+    await fetchBatch(`https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}`)
+    let queryResults = await Replay.updateReplayStats()
+    return response.json({ version: queryResults.latestVersion, ...queryResults })
+  }
 }
-async function fetchBatch(url: string) {
-  const options: RequestInit = {
-    headers: {
-      Authorization: `Bearer ${SENTRY_TOKEN}`
-    }
-  }
-  const req = await fetch(url, options)
-  if (!req.ok) {
-    throw new Error(`Request failed with status ${req.status}`);
-  }
-  const resp = await req.json() as ApiResponse<Replay[]>;
-  const replays = resp.data;
-  const headers = req.headers
-  const cleanedData = replays.map(record => sanitizeInput(record, Replay.allowedFields))
-  let updated = await Replay.updateOrCreateMany('id', cleanedData)
-  recordsUpdated = recordsUpdated + updated.length
-  const linkHeader = headers.get('link')
-  if (!linkHeader) {
-    return { error: 'link header missing from Sentry API response' }
-  }
-  const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
-  if (pagination.hasNextResults == true) {
-    console.log('fetching', pagination.next)
-    await fetchBatch(pagination.next)
-  }
-  console.log('no more results')
-  return { recordsUpdated }
-}
-function parseSentryLinkHeader(header: string): SentryPagination {
-  const links = header.split(',').map(part => part.trim())
-  let result = {} as SentryPagination
-  for (const link of links) {
-    const match = link.match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
-    if (!match) continue
-    const [, url, rel, results] = match
-    if (rel === 'previous') {
-      result.previous = url
-      result.hasPreviousResults = results === 'true'
-    } else if (rel === 'next') {
-      result.next = url
-      result.hasNextResults = results === 'true'
-    }
-  }
-  return result
-}
-function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
-  return allowedFields.reduce((acc, key) => {
-    if (key in data) acc[key] = data[key]
-    return acc
-  }, {} as Record<string, any>)
-}
-function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number; currentPage: number; nextPageUrl: string }) {
+function buildPaginationLinks(meta: {
+  previousPageUrl: string
+  lastPage: number
+  currentPage: number
+  nextPageUrl: string
+}) {
   const links = []
   // Previous
   links.push({
     url: meta.previousPageUrl,
     label: '&laquo; Prev',
-    active: false
+    active: false,
   })
   for (let page = 1; page <= meta.lastPage; page++) {
     links.push({
       url: `/list?page=${page}`,
       label: page.toString(),
-      active: page === meta.currentPage
+      active: page === meta.currentPage,
    })
  }
@@ -209,62 +111,8 @@ function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number;
   links.push({
     url: meta.nextPageUrl,
     label: 'Next &raquo;',
-    active: false
+    active: false,
   })
   return links
 }
-async function getResults(){
-  let results = await db.rawQuery(`
-    SELECT
-      u.display_name,
-      u.sessions,
-      u.total_time_seconds,
-      u.total_time_readable,
-      u.average_session_time_readable,
-      u.average_time_seconds,
-      r.id AS last_session_id,
-      r.finished_at AS last_session_time
-    FROM (
-      -- Aggregate sessions in the last 30 days
-      SELECT
-        "user" ->> 'display_name' AS display_name,
-        COUNT(duration) AS sessions,
-        SUM(duration) AS total_time_seconds,
-        AVG(duration) AS average_time_seconds,
-        CONCAT(
-          FLOOR(SUM(duration) / 86400), 'd ',
-          FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
-          FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
-        ) AS total_time_readable,
-        CONCAT(
-          FLOOR(COUNT(duration) / 86400), 'd ',
-          FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
-          FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
-        ) AS average_session_time_readable
-      FROM
-        replays
-      WHERE
-        finished_at >= NOW() - INTERVAL '30 days'
-      GROUP BY
-        "user" ->> 'display_name'
-    ) u
-    -- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
-    JOIN LATERAL (
-      SELECT id, finished_at
-      FROM replays
-      WHERE "user" ->> 'display_name' = u.display_name
-      ORDER BY
-        CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
-        finished_at DESC
-      LIMIT 1
-    ) r ON true
-    ORDER BY
-      u.total_time_seconds DESC;`
-  )
-  return results
-}
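Net effect of the controller change, condensed as a sketch (not part of the diff): index() now scrapes Sentry and refreshes the versioned cache via the model, while stats() only reads that cache.

import redis from '@adonisjs/redis/services/main'
import Replay from '#models/replay'

// stats() read path after this refactor
const version = await redis.get('replays:stats:latest_version')
let results
if (!version) {
  results = await Replay.updateReplayStats() // cache miss: run the query, bump the version
} else {
  const cached = await redis.get(`replays:stats:version:${version}:results`)
  if (cached) results = JSON.parse(cached)   // cache hit: reuse the stored snapshot
}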

app/models/replay.ts

@@ -1,7 +1,66 @@
 import { DateTime } from 'luxon'
 import { BaseModel, column } from '@adonisjs/lucid/orm'
+import db from '@adonisjs/lucid/services/db'
+import redis from '@adonisjs/redis/services/main'
 export default class Replay extends BaseModel {
+  public static async updateReplayStats() {
+    let results = await db.rawQuery(`
+      SELECT
+        u.display_name,
+        u.sessions,
+        u.total_time_seconds,
+        u.total_time_readable,
+        u.average_session_time_readable,
+        u.average_time_seconds,
+        r.id AS last_session_id,
+        r.finished_at AS last_session_time
+      FROM (
+        -- Aggregate sessions in the last 30 days
+        SELECT
+          "user" ->> 'display_name' AS display_name,
+          COUNT(duration) AS sessions,
+          SUM(duration) AS total_time_seconds,
+          AVG(duration) AS average_time_seconds,
+          CONCAT(
+            FLOOR(SUM(duration) / 86400), 'd ',
+            FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
+            FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
+          ) AS total_time_readable,
+          CONCAT(
+            FLOOR(COUNT(duration) / 86400), 'd ',
+            FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
+            FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
+          ) AS average_session_time_readable
+        FROM
+          replays
+        WHERE
+          finished_at >= NOW() - INTERVAL '30 days'
+          AND "user" ->> 'display_name' LIKE '%@%'
+        GROUP BY
+          "user" ->> 'display_name'
+      ) u
+      -- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
+      JOIN LATERAL (
+        SELECT id, finished_at
+        FROM replays
+        WHERE "user" ->> 'display_name' = u.display_name
+        ORDER BY
+          CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
+          finished_at DESC
+        LIMIT 1
+      ) r ON true
+      ORDER BY
+        u.total_time_seconds DESC;`)
+    const updatedVersion = await redis.incr('replays:stats:latest_version')
+    results.version = updatedVersion
+    results.updatedAt = Date.now()
+    await redis.set(`replays:stats:version:${updatedVersion}:results`, JSON.stringify(results))
+    return results
+  }
   @column({ isPrimary: true })
   declare id: string
@@ -12,14 +71,14 @@ export default class Replay extends BaseModel {
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare trace_ids: string[]
   @column({
     prepare: (value) => {
       return JSON.stringify(value)
-    }
+    },
   })
   declare error_ids: string[]
@@ -30,26 +89,22 @@ export default class Replay extends BaseModel {
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare tags: string[]
   @column()
   declare user: string[]
   @column()
   declare sdk: any
   @column()
   declare os: any
   @column()
   declare browser: any
   @column()
   declare device: any
@@ -59,21 +114,19 @@ export default class Replay extends BaseModel {
   @column()
   declare is_archived: boolean | null
   @column({
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare urls: any
   @column({
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare clicks: any
@@ -92,7 +145,7 @@ export default class Replay extends BaseModel {
   @column.dateTime()
   declare finished_at: DateTime | null
-  @column.dateTime({serializeAs: 'started_at'})
+  @column.dateTime({ serializeAs: 'started_at' })
   declare started_at: DateTime | null
   @column()
@@ -110,12 +163,11 @@ export default class Replay extends BaseModel {
   @column()
   declare platform: string | null
   @column({
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare releases: any
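A hedged note on what callers of the new static method get back (the .rows shape assumes the pg driver result object, which the controller's use of results.rows implies):

import Replay from '#models/replay'

const stats = await Replay.updateReplayStats()
// stats.version   -> value returned by redis.incr('replays:stats:latest_version')
// stats.updatedAt -> Date.now() timestamp taken when the query ran
// stats.rows      -> one row per display_name with session count, total/average durations,
//                    and the id/finished_at of that user's most recent session
// The same object is also cached at replays:stats:version:<version>:results as JSON.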

[development compose file]

@@ -1,16 +1,14 @@
 ---
-name: sentry
 services:
   scraper:
     build:
       context: .
-      target: dev-deps
+      target: develop
     env_file: .env.develop
     volumes:
       - ./:/app
       - node_modules:/app/node_modules
-    command: /bin/bash -c "echo 'hello' && node ace migration:run --force && node ace serve --watch"
+    command: /bin/sh -c "node ace migration:run --force && node ace serve --watch"
     depends_on:
       db:
         condition: service_healthy

compose.yml (new file, 77 lines)

@@ -0,0 +1,77 @@
---
name: sentry
services:
  reverse-proxy:
    image: traefik:latest
    command: --api.insecure=true --providers.docker
    ports:
      - 80:80
      - 8080:8080
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - traefik
  scraper:
    labels:
      - 'traefik.enable=true'
      - 'traefik.docker.network=sentry_traefik'
      - 'traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)'
      - 'traefik.http.services.scraper.loadbalancer.server.port=3333'
      - 'traefik.http.routers.scraper.entrypoints=http'
      - 'traefik.http.routers.scraper.service=scraper'
    networks:
      - traefik
      - redis
      - database
    depends_on:
      db:
        condition: service_healthy
  db:
    image: postgres:16
    environment:
      - POSTGRES_PASSWORD=password
    healthcheck:
      test: ['CMD-SHELL', 'pg_isready', '-d', 'postgres']
      interval: 5s
      timeout: 60s
      retries: 5
      start_period: 5s
    networks:
      - database
    volumes:
      - pg_data:/var/lib/postgresql/data
  grafana:
    image: grafana/grafana:latest
    labels:
      - 'traefik.enable=true'
      - 'traefik.docker.network=sentry_traefik'
      - 'traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)'
      - 'traefik.http.routers.grafana.entrypoints=http'
      - 'traefik.http.services.grafana.loadbalancer.server.port=3000'
      - 'traefik.http.routers.grafana.service=grafana'
    networks:
      - traefik
      - database
    healthcheck:
      test:
        [
          'CMD-SHELL',
          'wget --no-verbose --tries=1 --spider http://localhost:3000/api/health || exit 1',
        ]
      interval: 10s
      timeout: 30s
      retries: 5
      start_period: 30s
  redis:
    image: redis:latest
    networks:
      - redis
networks:
  traefik:
    driver: bridge
  database:
    driver: bridge
  redis:
    driver: bridge
volumes:
  pg_data: {}

[previous compose file — deleted, 65 lines]

@@ -1,65 +0,0 @@
---
name: sentry
services:
  reverse-proxy:
    image: traefik:latest
    command: --api.insecure=true --providers.docker
    ports:
      - 80:80
      - 8080:8080
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    networks:
      - traefik
  scraper:
    labels:
      - "traefik.enable=true"
      - "traefik.docker.network=sentry_traefik"
      - "traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)"
      - "traefik.http.services.scraper.loadbalancer.server.port=3333"
      - "traefik.http.routers.scraper.entrypoints=http"
      - "traefik.http.routers.scraper.service=scraper"
    networks:
      - traefik
      - redis
      - database
  db:
    image: postgres:16
    environment:
      - POSTGRES_PASSWORD=password
    healthcheck:
      test: ["CMD-SHELL", "pg_isready", "-d", "postgres"]
      interval: 5s
      timeout: 60s
      retries: 5
      start_period: 5s
    networks:
      - database
    volumes:
      - pg_data:/var/lib/postgresql/data
  grafana:
    image: grafana/grafana:latest
    labels:
      - "traefik.enable=true"
      - "traefik.docker.network=sentry_traefik"
      - "traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)"
      - "traefik.http.routers.grafana.entrypoints=http"
      - "traefik.http.services.grafana.loadbalancer.server.port=3000"
      - "traefik.http.routers.grafana.service=grafana"
    networks:
      - traefik
      - database
  redis:
    image: redis:latest
    networks:
      - redis
networks:
  traefik:
    driver: bridge
  database:
    driver: bridge
  redis:
    driver: bridge
volumes:
  pg_data: {}

inertia/app/app.ts

@@ -4,10 +4,10 @@
 import '../css/app.css'
 import { createSSRApp, h } from 'vue'
 import type { DefineComponent } from 'vue'
-import { createInertiaApp } from '@inertiajs/vue3'
+import { createInertiaApp, Link } from '@inertiajs/vue3'
 import { resolvePageComponent } from '@adonisjs/inertia/helpers'
 const appName = import.meta.env.VITE_APP_NAME || 'AdonisJS'
+Vue.component('inertia-link', Link)
 createInertiaApp({
   progress: { color: '#5468FF' },

inertia/pages/Replays/Index.vue

@@ -9,7 +9,6 @@
         <th class="p-2">Email</th>
         <th class="p-2">Date</th>
         <th class="p-2">Location</th>
       </tr>
     </thead>
     <tbody>
@@ -17,13 +16,22 @@
         <td class="p-2">{{ replay.id }}</td>
         <td class="p-2">{{ replay.user.email ?? replay.user.display_name }}</td>
         <td class="p-2">{{ replay.finished_at }}</td>
-        <td class="p-2">{{ replay.user.geo ? `${replay.user.geo.city} ${replay.user.geo.subdivision}, ${replay.user.geo.region}` : 'unknown' }}</td>
+        <td class="p-2">
+          {{
+            replay.user.geo
+              ? `${replay.user.geo.city} ${replay.user.geo.subdivision}, ${replay.user.geo.region}`
+              : 'unknown'
+          }}
+        </td>
       </tr>
     </tbody>
   </table>
   <!-- Pagination -->
-  <div class="mt-4 flex flex-wrap items-center gap-2" v-if="data.meta && data.meta.links && data.meta.links.length > 1">
+  <div
+    class="mt-4 flex flex-wrap items-center gap-2"
+    v-if="data.meta && data.meta.links && data.meta.links.length > 1"
+  >
     <!-- First -->
     <Link
       v-if="firstPageUrl && !isFirstPage"
@@ -34,11 +42,7 @@
     </Link>
     <!-- Previous -->
-    <Link
-      v-if="prevPageUrl"
-      :href="prevPageUrl"
-      class="px-3 py-1 border rounded text-sm"
-    >
+    <Link v-if="prevPageUrl" :href="prevPageUrl" class="px-3 py-1 border rounded text-sm">
       Prev
     </Link>
@@ -50,7 +54,7 @@
         class="px-3 py-1 border rounded text-sm"
         :class="{
           'font-bold bg-gray-300': link.active,
-          'text-gray-400 cursor-not-allowed': !link.url
+          'text-gray-400 cursor-not-allowed': !link.url,
         }"
       >
         <span v-html="link.label" />
@@ -58,11 +62,7 @@
     </template>
     <!-- Next -->
-    <Link
-      v-if="nextPageUrl"
-      :href="nextPageUrl"
-      class="px-3 py-1 border rounded text-sm"
-    >
+    <Link v-if="nextPageUrl" :href="nextPageUrl" class="px-3 py-1 border rounded text-sm">
       Next
     </Link>
@@ -83,12 +83,12 @@ import { computed } from 'vue'
 import { Link } from '@inertiajs/vue3'
 const props = defineProps({
-  data: Object
+  data: Object,
 })
 // Core pagination values
 const links = computed(() => props.data.meta.links || [])
-const currentIndex = computed(() => links.value.findIndex(link => link.active))
+const currentIndex = computed(() => links.value.findIndex((link) => link.active))
 const maxVisible = 10
 const half = Math.floor(maxVisible / 2)
@@ -115,5 +115,7 @@ const nextPageUrl = computed(() => links.value[currentIndex.value + 1]?.url)
 const lastPageUrl = computed(() => links.value[links.value.length - 2]?.url) // last item is "Next »", second-last is last numbered
 const isFirstPage = computed(() => links.value[currentIndex.value]?.label === '1')
-const isLastPage = computed(() => links.value[currentIndex.value]?.label === props.data.meta.last_page)
+const isLastPage = computed(
+  () => links.value[currentIndex.value]?.label === props.data.meta.last_page
+)
 </script>
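For readers of the pagination math above, a hedged sketch of the links array this page receives from buildPaginationLinks() in the controller (values are illustrative): the first entry is always "Prev", the last is always "Next", and the numbered pages sit between them, which is why lastPageUrl reads from links[links.length - 2].

const exampleLinks = [
  { url: null, label: '&laquo; Prev', active: false },
  { url: '/list?page=1', label: '1', active: true },
  { url: '/list?page=2', label: '2', active: false },
  { url: '/list?page=2', label: 'Next &raquo;', active: false },
]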

start/env.ts

@@ -34,5 +34,5 @@ export default await Env.create(new URL('../', import.meta.url), {
   PG_USER: Env.schema.string(),
   PG_PASSWORD: Env.schema.string(),
-  WEBHOOK_URL: Env.schema.string()
+  WEBHOOK_URL: Env.schema.string(),
 })

start/routes.ts

@@ -11,6 +11,5 @@ import ReplaysController from '#controllers/replays_controller'
 import router from '@adonisjs/core/services/router'
 router.on('/').renderInertia('home')
 router.get('/replays', [ReplaysController, 'index'])
-router.get('/list', [ReplaysController, 'list'
-])
+router.get('/list', [ReplaysController, 'list'])
 router.get('/stats', [ReplaysController, 'stats'])