Move stats logic to model and fix caching

Mike Conrad
2025-05-20 13:32:47 -04:00
parent 5e8f7e6005
commit 5df94eaafa
2 changed files with 91 additions and 95 deletions
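
For context, the caching scheme this commit moves to is a versioned cache-aside: a replays:stats:latest_version counter points at the most recent payload stored under replays:stats:version:<n>:results. Below is a minimal sketch of that read/refresh flow, using the same @adonisjs/redis service the controller imports; computeStats is a hypothetical stand-in for Replay.updateReplayStats(), and this is not the committed code.

// Sketch of the versioned cache-aside flow adopted in this commit (illustrative only).
import redis from '@adonisjs/redis/services/main'

async function readStats(computeStats: () => Promise<unknown>) {
  const version = await redis.get('replays:stats:latest_version')
  if (version) {
    // Cache hit: fetch the payload stored for the latest version.
    const cached = await redis.get(`replays:stats:version:${version}:results`)
    if (cached) return JSON.parse(cached)
  }
  // Cache miss: recompute, bump the version counter, and store the new payload.
  const results = await computeStats()
  const next = await redis.incr('replays:stats:latest_version')
  await redis.set(`replays:stats:version:${next}:results`, JSON.stringify(results))
  return results
}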

View File

@@ -23,27 +23,23 @@ export default class ReplaysController {
public async stats({ request, response }: HttpContext) {
const {sendToWebhook} = request.qs()
const cacheKey = `replays:sync:latest_version`
const latestFetchVersion = await redis.get(`replays:fetch:latest_version`)
const latestQueryVersion = await redis.get(`replays:stats:latest_version`)
if (latestFetchVersion == latestQueryVersion) {
let results
results = await redis.get(`replays:sync:version:${latestQueryVersion}:results`)
if (!results) {
console.log('no data in cache, updating')
results = await getResults()
await redis.set(`replays:sync:version:${latestQueryVersion}:results`, JSON.stringify(results))
}
console.log('resultssdsdfds')
return response.json(results)
const latestVersion = await redis.get(`replays:stats:latest_version`)
if (!latestVersion) {
//
console.log('Cache miss')
const queryResults = await Replay.updateReplayStats()
queryResults.latest_version = 1
queryResults.updatedAt = Date.now()
await redis.set(`replays:stats:version:1:results`, JSON.stringify(queryResults))
await redis.set(`replays:stats:latest_version`, 1)
return response.json(queryResults)
} else {
let results = await getResults()
console.log('results quer', latestQueryVersion)
console.log('cache hit')
const results = await redis.get(`replays:stats:version:${latestVersion}:results`)
return response.json(JSON.parse(results))
}
await redis.set(`replays:stats:version:${latestQueryVersion}:results`, JSON.stringify(results))
await redis.set(`replays:stats:latest_version`, latestFetchVersion)
await redis.set(`replays:fetch:latest_version`, latestFetchVersion)
return response.json(results)
if (sendToWebhook) {
try {
console.log('syncing to webhook')
@@ -63,8 +59,6 @@ export default class ReplaysController {
}
}
response.json(results.rows)
}
public async list({ request, inertia }: HttpContext) {
const page = request.input('page', 1)
const perPage = 20
@@ -112,14 +106,10 @@ export default class ReplaysController {
queryString = `?start=${start}&end=${end}`
}
const replays = await fetchBatch(`https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}`)
let latestVersion = await redis.get(`replays:fetch:latest_version`)
if (!latestVersion) {
redis.set('replays:fetch:latest_version', 1)
} else {
redis.set('replays:fetch:latest_version', ++latestVersion)
}
return response.json(replays)
let queryResults = await Replay.updateReplayStats()
return response.json({version: queryResults.latestVersion, ...queryResults})
}
}
@@ -214,57 +204,3 @@ function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number;
return links
}
async function getResults(){
let results = await db.rawQuery(`
SELECT
u.display_name,
u.sessions,
u.total_time_seconds,
u.total_time_readable,
u.average_session_time_readable,
u.average_time_seconds,
r.id AS last_session_id,
r.finished_at AS last_session_time
FROM (
-- Aggregate sessions in the last 30 days
SELECT
"user" ->> 'display_name' AS display_name,
COUNT(duration) AS sessions,
SUM(duration) AS total_time_seconds,
AVG(duration) AS average_time_seconds,
CONCAT(
FLOOR(SUM(duration) / 86400), 'd ',
FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
) AS total_time_readable,
CONCAT(
FLOOR(COUNT(duration) / 86400), 'd ',
FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
) AS average_session_time_readable
FROM
replays
WHERE
finished_at >= NOW() - INTERVAL '30 days'
GROUP BY
"user" ->> 'display_name'
) u
-- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
ORDER BY
CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
finished_at DESC
LIMIT 1
) r ON true
ORDER BY
u.total_time_seconds DESC;`
)
return results
}
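
The readable duration columns in this query (total_time_readable and average_session_time_readable) format a number of seconds as days, hours and minutes using FLOOR and MOD. For reference, the same arithmetic in TypeScript; the helper name is illustrative and not part of the commit.

// Mirrors the SQL CONCAT/FLOOR/MOD expressions above: seconds -> 'Xd Yh Zm'.
function toReadableDuration(totalSeconds: number): string {
  const days = Math.floor(totalSeconds / 86400)            // 86400 seconds per day
  const hours = Math.floor((totalSeconds % 86400) / 3600)  // remainder as whole hours
  const minutes = Math.floor((totalSeconds % 3600) / 60)   // remainder as whole minutes
  return `${days}d ${hours}h ${minutes}m`
}

// Example: toReadableDuration(90061) === '1d 1h 1m'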

View File

@@ -1,7 +1,67 @@
import { DateTime } from 'luxon'
import { BaseModel, column } from '@adonisjs/lucid/orm'
import db from '@adonisjs/lucid/services/db'
import redis from '@adonisjs/redis/services/main'
export default class Replay extends BaseModel {
public static async updateReplayStats() {
let results = await db.rawQuery(`
SELECT
u.display_name,
u.sessions,
u.total_time_seconds,
u.total_time_readable,
u.average_session_time_readable,
u.average_time_seconds,
r.id AS last_session_id,
r.finished_at AS last_session_time
FROM (
-- Aggregate sessions in the last 30 days
SELECT
"user" ->> 'display_name' AS display_name,
COUNT(duration) AS sessions,
SUM(duration) AS total_time_seconds,
AVG(duration) AS average_time_seconds,
CONCAT(
FLOOR(SUM(duration) / 86400), 'd ',
FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
) AS total_time_readable,
CONCAT(
FLOOR(COUNT(duration) / 86400), 'd ',
FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
) AS average_session_time_readable
FROM
replays
WHERE
finished_at >= NOW() - INTERVAL '30 days'
GROUP BY
"user" ->> 'display_name'
) u
-- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
JOIN LATERAL (
SELECT id, finished_at
FROM replays
WHERE "user" ->> 'display_name' = u.display_name
ORDER BY
CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
finished_at DESC
LIMIT 1
) r ON true
ORDER BY
u.total_time_seconds DESC;`
)
const updatedVersion = await redis.incr('replay:stats:latest_version')
results.version = updatedVersion
results.updatedAt = Date.now()
await redis.set(`replays:stats:latest_version`, updatedVersion)
await redis.set(`replays:stats:version:${updatedVersion}:results`, JSON.stringify(results))
return results
}
@column({ isPrimary: true })
declare id: string
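
Because the cache refresh now lives inside Replay.updateReplayStats(), it can also be kept warm outside the HTTP path. One plausible consumer is a scheduled Ace command; the sketch below is purely illustrative and not part of this commit (the command name, the #models/replay import alias, and the scheduling are assumptions).

// Hypothetical Ace command that recomputes replay stats and bumps the cache version.
// Only Replay.updateReplayStats() comes from this commit; the rest is illustrative.
import { BaseCommand } from '@adonisjs/core/ace'
import Replay from '#models/replay' // import alias is an assumption

export default class RefreshReplayStats extends BaseCommand {
  static commandName = 'replays:refresh-stats'
  static description = 'Recompute replay stats and store them under a new cache version'
  // Boot the application so the Lucid and Redis services are available inside run().
  static options = { startApp: true }

  async run() {
    const results = await Replay.updateReplayStats()
    this.logger.info(`replay stats cached as version ${results.version}`)
  }
}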