Compare commits: 5e8f7e6005...master

13 commits:

- 2ad1231d83
- 9a712a096b
- c20536cd96
- a7bdeb6248
- 112c69bb92
- d2b4070206
- e1e9f6c10a
- 36be410d38
- c88b7d762e
- 47a520e945
- f5aceea6fa
- 3c44fcc062
- 5df94eaafa
.env.example (10 lines changed)

@@ -1,16 +1,18 @@
 TZ=UTC
 PORT=3333
-HOST=localhost
+HOST=0.0.0.0
 LOG_LEVEL=info
-APP_KEY=
+APP_KEY=sMoYEqixvC3sgJO4WM9ej9ctlcVtAdCE
 NODE_ENV=development
 SESSION_DRIVER=cookie
+PG_USER=postgres
 PG_PORT=5432
-PG_HOST=localhost
+PG_HOST=db
 PG_PASSWORD=password
 SENTRY_TOKEN=
 SENTRY_ORG=
 REDIS_HOST=sentry-redis-1
 REDIS_PORT=6379
 REDIS_PASSWORD=
 WEBHOOK_URL=
+QUERY_FILTER='!user.email:*@mailinator.com !user.email:*@example.com'
.gitignore (vendored, 1 line changed)

@@ -23,3 +23,4 @@ yarn-error.log
 
 # Platform specific
 .DS_Store
+*compose-prod.yml
Dockerfile (10 lines changed)

@@ -1,4 +1,6 @@
-FROM node:20.12.2-alpine3.18 AS base
+FROM node:lts-alpine3.22 AS base
+HEALTHCHECK --interval=5s --timeout=10s --start-period=5s --retries=5 \
+  CMD sh -c 'wget --no-verbose --tries=1 --spider http://127.0.0.1:3333 || exit 1'
 
 # All deps stage
 FROM base AS deps

@@ -6,11 +8,10 @@ WORKDIR /app
 ADD package.json package-lock.json ./
 RUN npm ci
 
-FROM node:22 AS dev-deps
+FROM deps AS develop
 WORKDIR /app
-ADD package.json package-lock.json ./
+COPY --from=deps /app/node_modules /app/node_modules
 ENV NODE_ENV=development
-RUN npm ci
 EXPOSE 3333
 
 # Production only deps stage

@@ -33,4 +34,5 @@ WORKDIR /app
 COPY --from=production-deps /app/node_modules /app/node_modules
 COPY --from=build /app/build /app
 EXPOSE 8080
+
 CMD ["node", "./bin/server.js"]
README.md (new file, 32 lines)

@@ -0,0 +1,32 @@
+# Sentry Toolkit
+This project was born out of a simple marketing request, basically along the lines of "how can we track user engagement in our CRM?", to which I answered, "We already use Sentry for session recording; we can pull that data from the API, aggregate it, and push it to the CRM." Hence this project. It is currently pretty simple and includes an API as well as a basic web UI.
+
+
+## Tech Stack
+- [AdonisJS](https://adonisjs.com): I decided to use the wonderful AdonisJS framework for this project. Overkill? Probably, but it has a lot of niceties built in and I didn't want to reinvent the wheel for this simple project. I also wanted to play around with InertiaJS, which comes included.
+- [Docker](https://docker.com) - All services have been containerized for convenience of development, testing and deployment. A `compose.yml` and `compose.override.yml` are included for testing and developing locally.
+- Redis - Some basic caching, because why not?
+- PostgreSQL - Useful for storing historical session data.
+- Traefik - Reverse proxy/ingress controller, provided for convenient development and local testing.
+- Grafana - (Optional) For building pretty dashboards.
+
+
+## Getting started
+```shell
+$ cp .env.example .env.develop
+# Add/edit values in .env.develop as needed
+# The WEBHOOK_URL is not strictly necessary for basic functionality.
+
+# Tested on Linux; I have not had the pleasure of setting up Traefik on Windows/Mac
+# recently, so suggestions are welcome. Also, you may need `sudo` depending on how your
+# Docker environment is set up.
+$ docker compose up -d
+```
+
+Once all of the containers come up, you should be able to access the UI/API at [http://sentry.docker.localhost]() (Docker Compose magic). The database migrations should run automatically when you start with `docker compose`, but if you are running the backend with node instead, you will need to run `node ace migration:run` after starting the app for the first time.
+
+The main page will list any Replay sessions stored in the database.
+
+
+[http://sentry.docker.localhost/replays]() will fetch session data from Sentry and store it in the database. It will also return the results as JSON.
@@ -53,7 +53,7 @@ export default defineConfig({
     () => import('@adonisjs/lucid/database_provider'),
     () => import('@adonisjs/auth/auth_provider'),
     () => import('@adonisjs/inertia/inertia_provider'),
-    () => import('@adonisjs/redis/redis_provider')
+    () => import('@adonisjs/redis/redis_provider'),
   ],
 
   /*
app/Helpers/Replays.ts (new file, 52 lines)

@@ -0,0 +1,52 @@
+import Replay from '#models/replay'
+import { parseSentryLinkHeader, SentryPagination } from './Sentry.js'
+
+import env from '#start/env'
+let recordsUpdated = 0
+const SENTRY_TOKEN = env.get('SENTRY_TOKEN')
+interface ApiResponse<T> {
+  data: T
+  // optionally, you can define `meta`, `errors`, etc. if your API returns them
+}
+export async function fetchBatch(url: string) {
+  const options: RequestInit = {
+    headers: {
+      Authorization: `Bearer ${SENTRY_TOKEN}`,
+    },
+  }
+  const req = await fetch(url, options)
+  if (!req.ok) {
+    throw new Error(`Request failed with status ${req.status}`)
+  }
+
+  const resp = (await req.json()) as ApiResponse<Replay[]>
+  const replays = resp.data
+  const headers = req.headers
+
+  const cleanedData = replays.map((record) => sanitizeInput(record, Replay.allowedFields))
+
+  let updated = await Replay.updateOrCreateMany('id', cleanedData)
+  recordsUpdated = recordsUpdated + updated.length
+  const linkHeader = headers.get('link')
+  if (!linkHeader) {
+    return { error: 'link header missing from Sentry API response' }
+  }
+  const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
+
+  if (pagination.hasNextResults == true) {
+    console.log('fetching', pagination.next)
+    await fetchBatch(pagination.next)
+  }
+  console.log('no more results')
+  return { recordsUpdated }
+}
+
+function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
+  return allowedFields.reduce(
+    (acc, key) => {
+      if (key in data) acc[key] = data[key]
+      return acc
+    },
+    {} as Record<string, any>
+  )
+}
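For context on what `sanitizeInput` does to an incoming record, here is a minimal sketch; the field names are hypothetical stand-ins, since the real whitelist lives in `Replay.allowedFields`:

```ts
// Hypothetical whitelist; in the app this is Replay.allowedFields.
const allowedFields = ['id', 'duration', 'user']

const raw = { id: 'abc123', duration: 312, user: { email: 'a@b.c' }, _internal: 'dropped' }

// Same reduce as sanitizeInput: copy only whitelisted keys, so unknown
// columns from the Sentry payload never reach updateOrCreateMany.
const clean = allowedFields.reduce(
  (acc, key) => {
    if (key in raw) acc[key] = (raw as Record<string, any>)[key]
    return acc
  },
  {} as Record<string, any>
)
// clean => { id: 'abc123', duration: 312, user: { email: 'a@b.c' } }
```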
app/Helpers/Sentry.ts (new file, 27 lines)

@@ -0,0 +1,27 @@
+export interface SentryPagination {
+  previous: string
+  hasPreviousResults: boolean
+  hasNextResults: boolean
+  next: string
+}
+export function parseSentryLinkHeader(header: string): SentryPagination {
+  const links = header.split(',').map((part) => part.trim())
+
+  let result = {} as SentryPagination
+  for (const link of links) {
+    const match = link.match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
+    if (!match) continue
+
+    const [, url, rel, results] = match
+
+    if (rel === 'previous') {
+      result.previous = url
+      result.hasPreviousResults = results === 'true'
+    } else if (rel === 'next') {
+      result.next = url
+      result.hasNextResults = results === 'true'
+    }
+  }
+
+  return result
+}
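A quick sketch of what this parser produces. The sample header below follows the same `rel`/`results`/`cursor` format the `/faker` endpoint emits via `response.safeHeader('link', ...)`; the URLs are illustrative:

```ts
import { parseSentryLinkHeader } from './Sentry.js'

// Illustrative Link header in Sentry's pagination format.
const header =
  '<http://localhost:3333/faker/?page=1>; rel="previous"; results="false"; cursor="0:0:1", ' +
  '<http://localhost:3333/faker/?page=2>; rel="next"; results="true"; cursor="0:100:0"'

const pagination = parseSentryLinkHeader(header)
// pagination.next           => 'http://localhost:3333/faker/?page=2'
// pagination.hasNextResults => true, so fetchBatch would recurse into page 2
if (pagination.hasNextResults) {
  console.log('next page:', pagination.next)
}
```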
app/Helpers/Webhook.ts (new file, 21 lines)

@@ -0,0 +1,21 @@
+import env from '#start/env'
+
+export async function sendDataToWebhook(responseData: {
+  version: number
+  updatedAt: Date
+  numberOfRecords: number
+  data: unknown
+}) {
+  try {
+    console.log('syncing to webhook')
+    await fetch(env.get('WEBHOOK_URL'), {
+      headers: {
+        'content-type': 'application/json',
+      },
+      method: 'POST',
+      body: JSON.stringify(responseData),
+    })
+  } catch (e) {
+    console.error('error sending webhook data', e)
+  }
+}
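Calling it from the controller looks like the following sketch; the payload shape mirrors the `responseData` object that `stats` assembles, with placeholder values:

```ts
import { sendDataToWebhook } from './Webhook.js'

// Placeholder payload mirroring ReplaysController#stats.
await sendDataToWebhook({
  version: 3,            // replays:stats:latest_version counter from Redis
  updatedAt: new Date(), // when the stats were recomputed
  numberOfRecords: 42,   // results.rows.length
  data: [],              // results.rows
})
// Errors are caught and logged inside the helper, so this never throws.
```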
@@ -1,71 +1,51 @@
 import Replay from '#models/replay'
 import env from '#start/env'
 import type { HttpContext } from '@adonisjs/core/http'
-import db from '@adonisjs/lucid/services/db'
-const SENTRY_TOKEN = env.get('SENTRY_TOKEN')
 const SENTRY_ORG = env.get('SENTRY_ORG')
-let recordsUpdated = 0
 import redis from '@adonisjs/redis/services/main'
+import { fetchBatch } from '../Helpers/Replays.js'
+import { sendDataToWebhook } from '../Helpers/Webhook.js'
+import { faker } from '@faker-js/faker'
 
-interface ApiResponse<T> {
-  data: T;
-  // optionally, you can define `meta`, `errors`, etc. if your API returns them
-}
-
-interface SentryPagination {
-  previous: string;
-  hasPreviousResults: boolean;
-  hasNextResults: boolean;
-  next: string
-}
 export default class ReplaysController {
-  public async stats({ request, response }: HttpContext) {
-    const {sendToWebhook} = request.qs()
-    const cacheKey = `replays:sync:latest_version`
-    const latestFetchVersion = await redis.get(`replays:fetch:latest_version`)
-    const latestQueryVersion = await redis.get(`replays:stats:latest_version`)
-    if (latestFetchVersion == latestQueryVersion) {
-      let results
-      results = await redis.get(`replays:sync:version:${latestQueryVersion}:results`)
-      if (!results) {
-        console.log('no data in cache, updating')
-        results = await getResults()
-        await redis.set(`replays:sync:version:${latestQueryVersion}:results`, JSON.stringify(results))
-      }
-      console.log('resultssdsdfds')
-      return response.json(results)
-    } else {
-      let results = await getResults()
-      console.log('results quer', latestQueryVersion)
-      await redis.set(`replays:stats:version:${latestQueryVersion}:results`, JSON.stringify(results))
-      await redis.set(`replays:stats:latest_version`, latestFetchVersion)
-      await redis.set(`replays:fetch:latest_version`, latestFetchVersion)
-      return response.json(results)
-      if (sendToWebhook) {
-        try {
-          console.log('syncing to webhook')
-          await fetch(env.get('WEBHOOK_URL'),
-            {
-              headers:
-                {
-                  'content-type': 'application/json'
-                },
-              method: 'POST',
-              body: JSON.stringify(results.rows)
-            }
-          )
-        } catch(e) {
-          console.error('error sending webhook data', e)
-        }
-      }
-    }
-    response.json(results.rows)
-  }
-  public async list({ request, inertia }: HttpContext) {
+  public async faker({ request, response }: HttpContext) {
+    const { page } = await request.qs()
+    const sessions = Array.from({ length: 100 }, generateFakeSession)
+    const nextPage = +page + 1
+    await response.safeHeader(
+      'link',
+      `<http://localhost:3333/faker/?page=${page}>; rel="previous"; results="true"; cursor="0:1100:1", <http://localhost:3333/faker/?page=${nextPage}>; rel="next"; results="${page == 10 ? 'false' : 'true'}"; cursor="0:${page * 100}:0"`
+    )
+    return { data: sessions, count: sessions.length, page: page }
+  }
+  public async stats({ request, response }: HttpContext) {
+    const { sendToWebhook } = request.qs()
+    const latestVersion = await redis.get(`replays:stats:latest_version`)
+    let results
+    if (!latestVersion) {
+      console.log('Cache miss')
+      results = await Replay.updateReplayStats()
+    } else {
+      console.log('cache hit')
+      let data = await redis.get(`replays:stats:version:${latestVersion}:results`)
+      if (data) {
+        results = JSON.parse(data)
+      }
+    }
+    let responseData = {
+      version: results.version,
+      updatedAt: results.updatedAt,
+      numberOfRecords: results.rows.length,
+      data: results.rows,
+    }
+    if (sendToWebhook) {
+      await sendDataToWebhook(responseData)
+    }
+    return response.json(responseData)
+  }
+  public async home({ request, inertia }: HttpContext) {
     const page = request.input('page', 1)
     const perPage = 20
     const cacheKey = `replays:page:${page}`

@@ -74,16 +54,16 @@ export default class ReplaysController {
     let paginated, meta, replays
 
     if (data) {
-      ({ paginated, meta, replays } = JSON.parse(data))
+      ;({ paginated, meta, replays } = JSON.parse(data))
     } else {
       paginated = await Replay.query().paginate(page, perPage)
-      paginated.baseUrl('/list')
+      paginated.baseUrl('/')
 
       const json = paginated.toJSON()
 
       meta = {
         ...json.meta,
-        links: buildPaginationLinks(json.meta)
+        links: buildPaginationLinks(json.meta),
       }
 
       replays = json.data

@@ -94,114 +74,53 @@ export default class ReplaysController {
     return inertia.render('Replays/Index', {
       data: {
         replays,
-        meta
-      }
+        meta,
+      },
     })
   }
 
   async index({ request, response }: HttpContext) {
     const { statsPeriod, start, end } = request.qs()
-    recordsUpdated = 0
-
-    let queryString: string = '?statsPeriod=24h'// Default in case none is provided
+    let queryString: string = '?statsPeriod=24h' // Default in case none is provided
     if (statsPeriod) {
       queryString = `?statsPeriod=${statsPeriod}`
     } else if (start && end) {
      queryString = `?start=${start}&end=${end}`
     }
-    const replays = await fetchBatch(`https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}`)
-    let latestVersion = await redis.get(`replays:fetch:latest_version`)
-    if (!latestVersion) {
-      redis.set('replays:fetch:latest_version', 1)
-    } else {
-      redis.set('replays:fetch:latest_version', ++latestVersion)
-    }
-    return response.json(replays)
+    const queryFilter = env.get('QUERY_FILTER')
+    const baseUrl =
+      env.get('NODE_ENV') == 'production'
+        ? `https://sentry.io/api/0/organizations/${SENTRY_ORG}/replays/${queryString}&field=id&field=user&field=duration&field=started_at&field=finished_at&query=${encodeURIComponent(queryFilter)}`
+        : 'http://localhost:3333/faker?page=1'
+    console.log('base', baseUrl)
+    await fetchBatch(baseUrl)
+
+    let queryResults = await Replay.updateReplayStats()
+
+    return response.json({ version: queryResults.latestVersion, ...queryResults })
   }
 }
 
-async function fetchBatch(url: string) {
-  const options: RequestInit = {
-    headers: {
-      Authorization: `Bearer ${SENTRY_TOKEN}`
-    }
-  }
-  const req = await fetch(url, options)
-  if (!req.ok) {
-    throw new Error(`Request failed with status ${req.status}`);
-  }
-
-  const resp = await req.json() as ApiResponse<Replay[]>;
-  const replays = resp.data;
-  const headers = req.headers
-
-  const cleanedData = replays.map(record => sanitizeInput(record, Replay.allowedFields))
-
-  let updated = await Replay.updateOrCreateMany('id', cleanedData)
-  recordsUpdated = recordsUpdated + updated.length
-  const linkHeader = headers.get('link')
-  if (!linkHeader) {
-    return { error: 'link header missing from Sentry API response' }
-  }
-  const pagination: SentryPagination = parseSentryLinkHeader(linkHeader)
-
-  if (pagination.hasNextResults == true) {
-    console.log('fetching', pagination.next)
-    await fetchBatch(pagination.next)
-  }
-  console.log('no more results')
-  return { recordsUpdated }
-}
-
-function parseSentryLinkHeader(header: string): SentryPagination {
-  const links = header.split(',').map(part => part.trim())
-
-  let result = {} as SentryPagination
-  for (const link of links) {
-    const match = link.match(/<([^>]+)>;\s*rel="([^"]+)";\s*results="([^"]+)";\s*cursor="([^"]+)"/)
-    if (!match) continue
-
-    const [, url, rel, results] = match
-
-    if (rel === 'previous') {
-      result.previous = url
-      result.hasPreviousResults = results === 'true'
-    } else if (rel === 'next') {
-      result.next = url
-      result.hasNextResults = results === 'true'
-    }
-  }
-
-  return result
-}
-
-function sanitizeInput(data: Record<string, any>, allowedFields: string[]) {
-  return allowedFields.reduce((acc, key) => {
-    if (key in data) acc[key] = data[key]
-    return acc
-  }, {} as Record<string, any>)
-}
-
-function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number; currentPage: number; nextPageUrl: string }) {
+function buildPaginationLinks(meta: {
+  previousPageUrl: string
+  lastPage: number
+  currentPage: number
+  nextPageUrl: string
+}) {
   const links = []
 
   // Previous
   links.push({
     url: meta.previousPageUrl,
     label: '« Prev',
-    active: false
+    active: false,
   })
 
   for (let page = 1; page <= meta.lastPage; page++) {
     links.push({
-      url: `/list?page=${page}`,
+      url: `/?page=${page}`,
       label: page.toString(),
-      active: page === meta.currentPage
+      active: page === meta.currentPage,
    })
   }
 

@@ -209,62 +128,68 @@ function buildPaginationLinks(meta: { previousPageUrl: string, lastPage: number;
   links.push({
     url: meta.nextPageUrl,
     label: 'Next »',
-    active: false
+    active: false,
   })
 
   return links
 }
 
-async function getResults(){
-  let results = await db.rawQuery(`
-    SELECT
-      u.display_name,
-      u.sessions,
-      u.total_time_seconds,
-      u.total_time_readable,
-      u.average_session_time_readable,
-      u.average_time_seconds,
-      r.id AS last_session_id,
-      r.finished_at AS last_session_time
-
-    FROM (
-      -- Aggregate sessions in the last 30 days
-      SELECT
-        "user" ->> 'display_name' AS display_name,
-        COUNT(duration) AS sessions,
-        SUM(duration) AS total_time_seconds,
-        AVG(duration) AS average_time_seconds,
-        CONCAT(
-          FLOOR(SUM(duration) / 86400), 'd ',
-          FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
-          FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
-        ) AS total_time_readable,
-        CONCAT(
-          FLOOR(COUNT(duration) / 86400), 'd ',
-          FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
-          FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
-        ) AS average_session_time_readable
-      FROM
-        replays
-      WHERE
-        finished_at >= NOW() - INTERVAL '30 days'
-      GROUP BY
-        "user" ->> 'display_name'
-    ) u
-
-    -- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
-    JOIN LATERAL (
-      SELECT id, finished_at
-      FROM replays
-      WHERE "user" ->> 'display_name' = u.display_name
-      ORDER BY
-        CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
-        finished_at DESC
-      LIMIT 1
-    ) r ON true
-
-    ORDER BY
-      u.total_time_seconds DESC;`
-  )
-  return results
-}
+function generateFakeSession() {
+  const uuid = faker.string.uuid()
+  const browserName = faker.helpers.arrayElement(['Chrome', 'Firefox', 'Safari', 'Edge', 'Brave'])
+  const deviceBrand = faker.helpers.arrayElement(['Apple', 'Samsung', 'Google'])
+  const osName = faker.helpers.arrayElement(['iOS', 'Android', 'Windows', 'macOS'])
+  const platform = faker.helpers.arrayElement(['Sentry', 'Datadog', 'New Relic', 'Rollbar'])
+  const finishedAt = new Date(Date.now() - faker.number.int({ min: 0, max: 60 * 60 * 1000 }))
+  const displayName = faker.internet.email()
+  return {
+    activity: faker.number.int({ min: 1, max: 10 }),
+    browser: {
+      name: browserName,
+      version: faker.system.semver(),
+    },
+    count_dead_clicks: faker.number.int({ min: 0, max: 10 }),
+    count_rage_clicks: faker.number.int({ min: 0, max: 5 }),
+    count_errors: faker.number.int({ min: 0, max: 5 }),
+    count_segments: faker.number.int({ min: 0, max: 3 }),
+    count_urls: faker.number.int({ min: 1, max: 3 }),
+    device: {
+      brand: deviceBrand,
+      family: deviceBrand === 'Apple' ? 'iPhone' : deviceBrand,
+      model: faker.string.numeric({ length: 2 }),
+      name: `${deviceBrand} ${faker.string.alphanumeric({ length: 3 })}`,
+    },
+    dist: null,
+    duration: faker.number.int({ min: 100, max: 1000 }),
+    environment: faker.helpers.arrayElement(['production', 'staging', 'development']),
+    error_ids: [uuid],
+    finished_at: faker.date.between({ from: finishedAt, to: new Date() }).toISOString(),
+    has_viewed: faker.datatype.boolean(),
+    id: uuid,
+    is_archived: faker.datatype.boolean() ? null : false,
+    os: {
+      name: osName,
+      version: `${faker.number.int({ min: 10, max: 17 })}.${faker.number.int({ min: 0, max: 5 })}`,
+    },
+    platform: platform,
+    project_id: faker.string.numeric({ length: 6 }),
+    releases: [`version@${faker.system.semver()}`],
+    sdk: {
+      name: faker.hacker.noun(),
+      version: faker.system.semver(),
+    },
+    started_at: faker.date.recent().toISOString(),
+    tags: {
+      hello: ['world', faker.person.fullName()],
+    },
+    trace_ids: [uuid],
+    urls: [faker.internet.url()],
+    user: {
+      display_name: displayName,
+      email: displayName,
+      id: faker.string.numeric({ length: 8 }),
+      ip: faker.internet.ip(),
+      username: faker.internet.username(),
+    },
+  }
+}
@@ -1,7 +1,85 @@
 import { DateTime } from 'luxon'
 import { BaseModel, column } from '@adonisjs/lucid/orm'
+import db from '@adonisjs/lucid/services/db'
+import redis from '@adonisjs/redis/services/main'
 
 export default class Replay extends BaseModel {
+  public static async updateReplayStats() {
+    let results = await db.rawQuery(`
+      SELECT
+        u.display_name,
+        u.sessions,
+        u.total_time_seconds,
+        u.total_time_readable,
+        u.average_session_time_readable,
+        u.average_time_seconds,
+        r.id AS last_session_id,
+        r.finished_at AS last_session_time,
+        o.id AS oldest_session_id,
+        o.finished_at AS oldest_session_time
+
+      FROM (
+        -- Aggregate sessions in the last 30 days
+        SELECT
+          "user" ->> 'display_name' AS display_name,
+          COUNT(duration) AS sessions,
+          SUM(duration) AS total_time_seconds,
+          AVG(duration) AS average_time_seconds,
+          CONCAT(
+            FLOOR(SUM(duration) / 86400), 'd ',
+            FLOOR(MOD(SUM(duration), 86400) / 3600), 'h ',
+            FLOOR(MOD(SUM(duration), 3600) / 60), 'm'
+          ) AS total_time_readable,
+          CONCAT(
+            FLOOR(COUNT(duration) / 86400), 'd ',
+            FLOOR(MOD(COUNT(duration), 86400) / 3600), 'h ',
+            FLOOR(MOD(COUNT(duration), 3600) / 60), 'm'
+          ) AS average_session_time_readable
+        FROM
+          replays
+        WHERE
+          finished_at >= NOW() - INTERVAL '30 days'
+          AND "user" ->> 'display_name' LIKE '%@%'
+          AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
+
+        GROUP BY
+          "user" ->> 'display_name'
+      ) u
+
+      -- LATERAL JOIN to get latest session (either within 30d or fallback to latest overall)
+      JOIN LATERAL (
+        SELECT id, finished_at
+        FROM replays
+        WHERE "user" ->> 'display_name' = u.display_name
+          AND "user" ->> 'display_name' LIKE '%@%'
+          AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
+        ORDER BY
+          CASE WHEN finished_at >= NOW() - INTERVAL '30 days' THEN 0 ELSE 1 END,
+          finished_at DESC
+        LIMIT 1
+      ) r ON true
+
+      -- LATERAL JOIN to get the oldest session
+      JOIN LATERAL (
+        SELECT id, finished_at
+        FROM replays
+        WHERE "user" ->> 'display_name' = u.display_name
+          AND "user" ->> 'display_name' LIKE '%@%'
+          AND "user" ->> 'display_name' !~ 'e2etesting|@paragontruss.com'
+        ORDER BY finished_at ASC
+        LIMIT 1
+      ) o ON true
+
+      ORDER BY
+        u.total_time_seconds DESC;
+    `)
+    const updatedVersion = await redis.incr('replays:stats:latest_version')
+    results.version = updatedVersion
+    results.updatedAt = Date.now()
+    await redis.set(`replays:stats:version:${updatedVersion}:results`, JSON.stringify(results))
+    return results
+  }
   @column({ isPrimary: true })
   declare id: string
 

@@ -12,14 +90,14 @@ export default class Replay extends BaseModel {
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare trace_ids: string[]
 
   @column({
     prepare: (value) => {
       return JSON.stringify(value)
-    }
+    },
   })
   declare error_ids: string[]
 

@@ -30,50 +108,44 @@ export default class Replay extends BaseModel {
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare tags: string[]
 
   @column()
   declare user: string[]
 
   @column()
   declare sdk: any
 
   @column()
   declare os: any
 
   @column()
   declare browser: any
 
   @column()
   declare device: any
 
   @column()
   declare ota_updates: any
 
   @column()
   declare is_archived: boolean | null
 
   @column({
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare urls: any
 
   @column({
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare clicks: any
 

@@ -92,7 +164,7 @@ export default class Replay extends BaseModel {
   @column.dateTime()
   declare finished_at: DateTime | null
 
-  @column.dateTime({serializeAs: 'started_at'})
+  @column.dateTime({ serializeAs: 'started_at' })
   declare started_at: DateTime | null
 
   @column()

@@ -110,12 +182,11 @@ export default class Replay extends BaseModel {
   @column()
   declare platform: string | null
 
   @column({
     prepare: (value) => {
       // The values from sentry are just arrays so convert them to json
       return JSON.stringify(value)
-    }
+    },
   })
   declare releases: any
 
@@ -1,16 +1,14 @@
 ---
-name: sentry
 services:
   scraper:
     build:
       context: .
-      target: dev-deps
+      target: develop
     env_file: .env.develop
     volumes:
       - ./:/app
       - node_modules:/app/node_modules
-    command: /bin/bash -c "echo 'hello' && node ace migration:run --force && node ace serve --watch"
+    command: /bin/sh -c "node ace migration:run --force && node ace serve --watch"
 
     depends_on:
       db:
         condition: service_healthy
compose.yml (new file, 77 lines)

@@ -0,0 +1,77 @@
+---
+name: sentry
+services:
+  reverse-proxy:
+    image: traefik:latest
+    command: --api.insecure=true --providers.docker
+    ports:
+      - 80:80
+      - 8080:8080
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    networks:
+      - traefik
+  scraper:
+    labels:
+      - 'traefik.enable=true'
+      - 'traefik.docker.network=sentry_traefik'
+      - 'traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)'
+      - 'traefik.http.services.scraper.loadbalancer.server.port=3333'
+      - 'traefik.http.routers.scraper.entrypoints=http'
+      - 'traefik.http.routers.scraper.service=scraper'
+    networks:
+      - traefik
+      - redis
+      - database
+    depends_on:
+      db:
+        condition: service_healthy
+  db:
+    image: postgres:16
+    environment:
+      - POSTGRES_PASSWORD=password
+    healthcheck:
+      test: ['CMD-SHELL', 'pg_isready', '-d', 'postgres']
+      interval: 5s
+      timeout: 60s
+      retries: 5
+      start_period: 5s
+    networks:
+      - database
+    volumes:
+      - pg_data:/var/lib/postgresql/data
+  grafana:
+    image: grafana/grafana:latest
+    labels:
+      - 'traefik.enable=true'
+      - 'traefik.docker.network=sentry_traefik'
+      - 'traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)'
+      - 'traefik.http.routers.grafana.entrypoints=http'
+      - 'traefik.http.services.grafana.loadbalancer.server.port=3000'
+      - 'traefik.http.routers.grafana.service=grafana'
+    networks:
+      - traefik
+      - database
+    healthcheck:
+      test:
+        [
+          'CMD-SHELL',
+          'wget --no-verbose --tries=1 --spider http://localhost:3000/api/health || exit 1',
+        ]
+      interval: 10s
+      timeout: 30s
+      retries: 5
+      start_period: 30s
+  redis:
+    image: redis:latest
+    networks:
+      - redis
+networks:
+  traefik:
+    driver: bridge
+  database:
+    driver: bridge
+  redis:
+    driver: bridge
+volumes:
+  pg_data: {}
@@ -33,4 +33,4 @@ export default redisConfig
 
 declare module '@adonisjs/redis/types' {
   export interface RedisConnections extends InferConnections<typeof redisConfig> {}
 }
@@ -1,65 +0,0 @@
----
-name: sentry
-services:
-  reverse-proxy:
-    image: traefik:latest
-    command: --api.insecure=true --providers.docker
-    ports:
-      - 80:80
-      - 8080:8080
-    volumes:
-      - /var/run/docker.sock:/var/run/docker.sock
-    networks:
-      - traefik
-  scraper:
-    labels:
-      - "traefik.enable=true"
-      - "traefik.docker.network=sentry_traefik"
-      - "traefik.http.routers.scraper.rule=Host(`sentry.docker.localhost`)"
-      - "traefik.http.services.scraper.loadbalancer.server.port=3333"
-      - "traefik.http.routers.scraper.entrypoints=http"
-      - "traefik.http.routers.scraper.service=scraper"
-    networks:
-      - traefik
-      - redis
-      - database
-  db:
-    image: postgres:16
-    environment:
-      - POSTGRES_PASSWORD=password
-    healthcheck:
-      test: ["CMD-SHELL", "pg_isready", "-d", "postgres"]
-      interval: 5s
-      timeout: 60s
-      retries: 5
-      start_period: 5s
-    networks:
-      - database
-    volumes:
-      - pg_data:/var/lib/postgresql/data
-  grafana:
-    image: grafana/grafana:latest
-    labels:
-      - "traefik.enable=true"
-      - "traefik.docker.network=sentry_traefik"
-      - "traefik.http.routers.grafana.rule=Host(`grafana.docker.localhost`)"
-      - "traefik.http.routers.grafana.entrypoints=http"
-      - "traefik.http.services.grafana.loadbalancer.server.port=3000"
-      - "traefik.http.routers.grafana.service=grafana"
-
-    networks:
-      - traefik
-      - database
-  redis:
-    image: redis:latest
-    networks:
-      - redis
-networks:
-  traefik:
-    driver: bridge
-  database:
-    driver: bridge
-  redis:
-    driver: bridge
-volumes:
-  pg_data: {}
docs/assets/homepage.jpg (new binary file, 26 KiB; not shown)
@@ -4,10 +4,10 @@
 import '../css/app.css'
 import { createSSRApp, h } from 'vue'
 import type { DefineComponent } from 'vue'
-import { createInertiaApp } from '@inertiajs/vue3'
+import { createInertiaApp, Link } from '@inertiajs/vue3'
 import { resolvePageComponent } from '@adonisjs/inertia/helpers'
 
 const appName = import.meta.env.VITE_APP_NAME || 'AdonisJS'
+Vue.component('inertia-link', Link)
-
 
 createInertiaApp({
   progress: { color: '#5468FF' },
@@ -9,7 +9,6 @@
 <th class="p-2">Email</th>
 <th class="p-2">Date</th>
 <th class="p-2">Location</th>
-
 </tr>
 </thead>
 <tbody>

@@ -17,28 +16,33 @@
 <td class="p-2">{{ replay.id }}</td>
 <td class="p-2">{{ replay.user.email ?? replay.user.display_name }}</td>
 <td class="p-2">{{ replay.finished_at }}</td>
-<td class="p-2">{{ replay.user.geo ? `${replay.user.geo.city} ${replay.user.geo.subdivision}, ${replay.user.geo.region}` : 'unknown' }}</td>
+<td class="p-2">
+  {{
+    replay.user.geo
+      ? `${replay.user.geo.city} ${replay.user.geo.subdivision}, ${replay.user.geo.region}`
+      : 'unknown'
+  }}
+</td>
 </tr>
 </tbody>
 </table>
 
 <!-- Pagination -->
-<div class="mt-4 flex flex-wrap items-center gap-2" v-if="data.meta && data.meta.links && data.meta.links.length > 1">
+<div
+  class="mt-4 flex flex-wrap items-center gap-2"
+  v-if="data.meta && data.meta.links && data.meta.links.length > 1"
+>
 <!-- First -->
 <Link
   v-if="firstPageUrl && !isFirstPage"
   :href="firstPageUrl"
   class="px-3 py-1 border rounded text-sm"
 >
   « First
 </Link>
 
 <!-- Previous -->
-<Link
-  v-if="prevPageUrl"
-  :href="prevPageUrl"
-  class="px-3 py-1 border rounded text-sm"
->
+<Link v-if="prevPageUrl" :href="prevPageUrl" class="px-3 py-1 border rounded text-sm">
   ‹ Prev
 </Link>
 

@@ -48,9 +52,9 @@
 :is="link.url ? Link : 'span'"
 :href="link.url"
 class="px-3 py-1 border rounded text-sm"
 :class="{
   'font-bold bg-gray-300': link.active,
-  'text-gray-400 cursor-not-allowed': !link.url
+  'text-gray-400 cursor-not-allowed': !link.url,
 }"
 >
 <span v-html="link.label" />

@@ -58,18 +62,14 @@
 </template>
 
 <!-- Next -->
-<Link
-  v-if="nextPageUrl"
-  :href="nextPageUrl"
-  class="px-3 py-1 border rounded text-sm"
->
+<Link v-if="nextPageUrl" :href="nextPageUrl" class="px-3 py-1 border rounded text-sm">
   Next ›
 </Link>
 
 <!-- Last -->
 <Link
   v-if="lastPageUrl && !isLastPage"
   :href="lastPageUrl"
   class="px-3 py-1 border rounded text-sm"
 >
   Last »

@@ -83,12 +83,12 @@ import { computed } from 'vue'
 import { Link } from '@inertiajs/vue3'
 
 const props = defineProps({
-  data: Object
+  data: Object,
 })
 
 // Core pagination values
 const links = computed(() => props.data.meta.links || [])
-const currentIndex = computed(() => links.value.findIndex(link => link.active))
+const currentIndex = computed(() => links.value.findIndex((link) => link.active))
 
 const maxVisible = 10
 const half = Math.floor(maxVisible / 2)

@@ -115,5 +115,7 @@ const nextPageUrl = computed(() => links.value[currentIndex.value + 1]?.url)
 const lastPageUrl = computed(() => links.value[links.value.length - 2]?.url) // last item is "Next »", second-last is last numbered
 
 const isFirstPage = computed(() => links.value[currentIndex.value]?.label === '1')
-const isLastPage = computed(() => links.value[currentIndex.value]?.label === props.data.meta.last_page)
+const isLastPage = computed(
+  () => links.value[currentIndex.value]?.label === props.data.meta.last_page
+)
 </script>
package-lock.json (generated, 1 line changed)

@@ -34,6 +34,7 @@
     "@adonisjs/eslint-config": "^2.0.0",
     "@adonisjs/prettier-config": "^1.4.4",
     "@adonisjs/tsconfig": "^1.4.0",
+    "@faker-js/faker": "^9.8.0",
     "@japa/assert": "^4.0.1",
     "@japa/plugin-adonisjs": "^4.0.0",
     "@japa/runner": "^4.2.0",
@@ -36,6 +36,7 @@
     "@adonisjs/eslint-config": "^2.0.0",
     "@adonisjs/prettier-config": "^1.4.4",
     "@adonisjs/tsconfig": "^1.4.0",
+    "@faker-js/faker": "^9.8.0",
     "@japa/assert": "^4.0.1",
     "@japa/plugin-adonisjs": "^4.0.0",
     "@japa/runner": "^4.2.0",
@@ -6,7 +6,7 @@
 <meta name="viewport" content="width=device-width, initial-scale=1" />
 
 <title inertia>
-  AdonisJS x Inertia x VueJS
+  Sentry Toolkit
 </title>
 
 <link rel="preconnect" href="https://fonts.bunny.net" />
@@ -34,5 +34,7 @@ export default await Env.create(new URL('../', import.meta.url), {
   PG_USER: Env.schema.string(),
   PG_PASSWORD: Env.schema.string(),
 
-  WEBHOOK_URL: Env.schema.string()
+  WEBHOOK_URL: Env.schema.string.optional(),
+
+  QUERY_FILTER: Env.schema.string(),
 })
@@ -9,8 +9,7 @@
 
 import ReplaysController from '#controllers/replays_controller'
 import router from '@adonisjs/core/services/router'
-router.on('/').renderInertia('home')
+router.get('/', [ReplaysController, 'home'])
 router.get('/replays', [ReplaysController, 'index'])
-router.get('/list', [ReplaysController, 'list'
-])
-router.get('/stats', [ReplaysController, 'stats'])
+router.get('/stats', [ReplaysController, 'stats'])
+router.get('/faker', [ReplaysController, 'faker'])
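For orientation, a sketch of exercising these routes once the compose stack is up. The hostname comes from the Traefik labels in `compose.yml`, and `sendToWebhook` is the query-string flag that `stats` checks; both requests are illustrative:

```ts
const base = 'http://sentry.docker.localhost'

// Pull replay sessions into Postgres (Sentry in production, /faker otherwise).
await fetch(`${base}/replays?statsPeriod=24h`)

// Read the aggregated stats; the flag also POSTs the payload to WEBHOOK_URL.
const res = await fetch(`${base}/stats?sendToWebhook=1`)
const { version, updatedAt, numberOfRecords } = await res.json()
console.log(version, updatedAt, numberOfRecords)
```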