Finalize SQL query and add webhook endpoint

Mike Conrad
2025-05-19 16:57:02 -04:00
parent 692a086aa5
commit 83a6053eb3
3 changed files with 134 additions and 83 deletions

View File

@@ -13,3 +13,4 @@ SENTRY_ORG=
REDIS_HOST=sentry-redis-1
REDIS_PORT=6379
REDIS_PASSWORD=
+WEBHOOK_URL=
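WEBHOOK_URL only needs to point at something that accepts a JSON POST; the controller change below posts the aggregated replay rows as the request body after each search. A minimal sketch of a local receiver for testing, assuming Node's built-in node:http module and a hypothetical port 3333 (not part of this commit):

// webhook-receiver.ts - hypothetical local receiver for testing, not part of this commit
import { createServer } from 'node:http'

const server = createServer((req, res) => {
  if (req.method !== 'POST') {
    res.writeHead(405)
    res.end()
    return
  }
  let body = ''
  req.on('data', (chunk) => (body += chunk))
  req.on('end', () => {
    // The controller sends JSON.stringify(results.rows), so body is a JSON array of rows
    const rows = JSON.parse(body)
    console.log(`received ${rows.length} rows`)
    res.writeHead(204)
    res.end()
  })
})

// Point WEBHOOK_URL at http://localhost:3333/ (hypothetical port) while testing
server.listen(3333)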

View File

@@ -6,9 +6,7 @@ const SENTRY_TOKEN = env.get('SENTRY_TOKEN')
const SENTRY_ORG = env.get('SENTRY_ORG')
let recordsUpdated = 0
import redis from '@adonisjs/redis/services/main'
-import { DateTime } from 'luxon'
-const thirtyDaysAgo = DateTime.now().minus({ days: 30 }).toSQL()
interface ApiResponse<T> {
  data: T;
@@ -23,24 +21,74 @@ interface SentryPagination {
}
export default class ReplaysController {
-  public async search({ response}: HttpContext) {
-    console.log('thir', thirtyDaysAgo)
-    let results = await db.rawQuery(`SELECT
-      "user" ->> 'display_name' AS user_email,
-      SUM(duration) AS duration,
-      COUNT(duration) AS sessions,
-      AVG(duration) AS avg_duration
-    FROM
-      replays
-    WHERE
-      finished_at >= ?
-    GROUP BY
-      user_email
-    ORDER BY
-      SUM(duration) desc`, [thirtyDaysAgo]
-    )
-    response.json(results)
-  }
+  public async search({ response }: HttpContext) {
+    let results = await db.rawQuery(`
+      SELECT
+        u.display_name,
+        u.sessions,
+        u.total_time_seconds,
+        u.total_time_readable,
+        u.average_session_time_readable,
+        u.average_time_seconds,
+        r.id AS last_session_id,
+        r.finished_at AS last_session_time
+      FROM (
+        -- Aggregate sessions in the last 30 days
+        SELECT
+          "user" ->> 'display_name' AS display_name,
+          COUNT(duration) AS sessions,
+          SUM(duration) AS total_time_seconds,
+          AVG(duration) AS average_time_seconds,
+          CONCAT(
+            FLOOR(SUM(duration) / 86400), 'd ',
+            FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
+            FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
+          ) AS total_time_readable,
+          CONCAT(
+            FLOOR(AVG(duration) / 86400), 'd ',
+            FLOOR(MOD(AVG(duration), 86400) / 3600), 'h ',
+            FLOOR(MOD(AVG(duration), 3600) / 60), 'm'
+          ) AS average_session_time_readable
+        FROM
+          replays
+        WHERE
+          finished_at >= NOW() - INTERVAL '30 days'
+        GROUP BY
+          "user" ->> 'display_name'
+      ) u
+      -- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
+      JOIN LATERAL (
+        SELECT id, finished_at
+        FROM replays
+        WHERE "user" ->> 'display_name' = u.display_name
+        ORDER BY
+          CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
+          finished_at DESC
+        LIMIT 1
+      ) r ON true
+      ORDER BY
+        u.total_time_seconds DESC;`
+    )
+    try {
+      await fetch(env.get('WEBHOOK_URL'), {
+        method: 'POST',
+        headers: {
+          'content-type': 'application/json'
+        },
+        body: JSON.stringify(results.rows)
+      })
+    } catch (e) {
+      console.error('error sending webhook data', e)
+    }
+    response.json(results.rows)
+  }
  public async list({ request, inertia }: HttpContext) {
    const page = request.input('page', 1)
    const perPage = 20
@@ -75,88 +123,88 @@ export default class ReplaysController {
    })
  }

  async index({ request, response }: HttpContext) {
-    const {statsPeriod, start, end} = request.qs()
+    const { statsPeriod, start, end } = request.qs()
    recordsUpdated = 0
    let queryString: string = '?statsPeriod=24h'// Default in case none is provided
    if (statsPeriod) {
      queryString = `?statsPeriod=${statsPeriod}`
    } else if (start && end) {
      queryString = `?start=${start}&end=${end}`
    }
    const replays = await fetchBatch(`https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}`)
    return response.json(replays)
  }
}

async function fetchBatch(url: string) {
  const options: RequestInit = {
    headers: {
      Authorization: `Bearer ${SENTRY_TOKEN}`
    }
  }
  const req = await fetch(url, options)
  if (!req.ok) {
    throw new Error(`Request failed with status ${req.status}`);
  }
  const resp = await req.json() as ApiResponse<Replay[]>;
  const replays = resp.data;
  const headers = req.headers
  const cleanedData = replays.map(record => sanitizeInput(record, Replay.allowedFields))
-  let updated = await Replay.updateOrCreateMany('id', cleanedData )
+  let updated = await Replay.updateOrCreateMany('id', cleanedData)
  recordsUpdated = recordsUpdated + updated.length
  const linkHeader = headers.get('link')
  if (!linkHeader) {
-    return {error: 'link header missing from Sentry API response'}
+    return { error: 'link header missing from Sentry API response' }
  }
  const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
  if (pagination.hasNextResults == true) {
    console.log('fetching', pagination.next)
    await fetchBatch(pagination.next)
  }
  console.log('no more results')
-  return {recordsUpdated}
+  return { recordsUpdated }
}

-function parseSentryLinkHeader(header:string): SentryPagination {
+function parseSentryLinkHeader(header: string): SentryPagination {
  const links = header.split(',').map(part => part.trim())
  let result = {} as SentryPagination
  for (const link of links) {
    const match = link.match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
    if (!match) continue
    const [, url, rel, results] = match
    if (rel === 'previous') {
      result.previous = url
      result.hasPreviousResults = results === 'true'
    } else if (rel === 'next') {
      result.next = url
      result.hasNextResults = results === 'true'
    }
  }
  return result
}

function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
  return allowedFields.reduce((acc, key) => {
    if (key in data) acc[key] = data[key]
    return acc
  }, {} as Record<string, any>)
}

-function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number; currentPage: number; nextPageUrl: string}) {
+function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number; currentPage: number; nextPageUrl: string }) {
  const links = []
  // Previous
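Two reference points that may help when reading the new search() query above, both sketches under assumptions rather than code from this commit: a hypothetical TypeScript shape for the rows it returns (and POSTs to WEBHOOK_URL), and an equivalent of the SQL CONCAT/FLOOR duration formatting (86400 seconds per day, 3600 per hour, 60 per minute). Depending on the Postgres driver, the numeric aggregates may arrive as strings rather than numbers.

// Hypothetical row shape for the aggregation query (field names mirror the SQL aliases);
// this is also the element type of the array POSTed to WEBHOOK_URL.
interface ReplayUsageRow {
  display_name: string
  sessions: number
  total_time_seconds: number
  average_time_seconds: number
  total_time_readable: string // e.g. '1d 4h 23m'
  average_session_time_readable: string
  last_session_id: string
  last_session_time: string
}

// TypeScript equivalent of the readable-duration arithmetic used in the query:
// FLOOR(s / 86400) days, FLOOR(MOD(s, 86400) / 3600) hours, FLOOR(MOD(s, 3600) / 60) minutes.
function formatSeconds(seconds: number): string {
  const days = Math.floor(seconds / 86400)
  const hours = Math.floor((seconds % 86400) / 3600)
  const minutes = Math.floor((seconds % 3600) / 60)
  return `${days}d ${hours}h ${minutes}m`
}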

View File

@@ -33,4 +33,6 @@ export default await Env.create(new URL('../', import.meta.url), {
  PG_HOST: Env.schema.string(),
  PG_USER: Env.schema.string(),
  PG_PASSWORD: Env.schema.string(),
+  WEBHOOK_URL: Env.schema.string()
})