mirror of https://github.com/sourcebot-dev/sourcebot.git (synced 2025-12-11 20:05:25 +00:00)
experiment with writing logs to DB

parent 963f6fd69e
commit bbc9e815c8

5 changed files with 174 additions and 74 deletions

@@ -1,8 +1,8 @@
 import { createBullBoard } from '@bull-board/api';
 import { ExpressAdapter } from '@bull-board/express';
 import * as Sentry from '@sentry/node';
-import { PrismaClient, Repo, RepoIndexingJobStatus } from "@sourcebot/db";
-import { createLogger } from "@sourcebot/logger";
+import { Prisma, PrismaClient, Repo, RepoIndexingJobStatus } from "@sourcebot/db";
+import { createLogger, Logger, Transport, TransportStreamOptions } from "@sourcebot/logger";
 import express from 'express';
 import { BullBoardGroupMQAdapter, Job, Queue, ReservedJob, Worker } from "groupmq";
 import { Redis } from 'ioredis';

@@ -12,7 +12,87 @@ import { existsSync } from 'fs';
 import { cloneRepository, fetchRepository, unsetGitConfig, upsertGitConfig } from './git.js';
 import { indexGitRepository } from './zoekt.js';
 
-const logger = createLogger('index-syncer');
+interface LogEntry {
+    message: string;
+}
+
+interface DatabaseTransportOptions extends TransportStreamOptions {
+    writer: (logs: LogEntry[]) => Promise<void>;
+}
+
+export class DatabaseTransport extends Transport {
+    private logs: LogEntry[] = [];
+    private writer: (logs: LogEntry[]) => Promise<void>;
+
+    constructor(opts: DatabaseTransportOptions) {
+        super(opts);
+        this.writer = opts.writer;
+    }
+
+    log(info: any, callback: () => void) {
+        setImmediate(() => {
+            this.emit('logged', info);
+        });
+
+        // Capture structured log data
+        const logEntry: LogEntry = {
+            // timestamp: info.timestamp,
+            // level: info.level,
+            message: info.message,
+            // label: info.label,
+            // stack: info.stack,
+            // metadata: info.metadata || {},
+            // ...info // Include any additional fields
+        };
+
+        this.logs.push(logEntry);
+
+        callback();
+    }
+
+    async flush() {
+        if (this.logs.length > 0) {
+            await this.writer(this.logs);
+            this.logs = [];
+        }
+    }
+}
+
+
+const useScopedLogger = async (jobId: string, db: PrismaClient, cb: (logger: Logger) => Promise<void>) => {
+    const transport = new DatabaseTransport({
+        writer: async (logs) => {
+            try {
+                const existingLogs = await db.repoIndexingJob.findUnique({
+                    where: { id: jobId },
+                    select: { logs: true }
+                });
+
+                await db.repoIndexingJob.update({
+                    where: { id: jobId },
+                    data: {
+                        logs: [
+                            ...(existingLogs?.logs as unknown as LogEntry[] ?? []),
+                            ...logs,
+                        ] as unknown as Prisma.InputJsonValue,
+                    }
+                })
+            } catch (error) {
+                console.error(`Error writing logs for job ${jobId}.`, error);
+            }
+        }
+    });
+
+    const logger = createLogger('index-syncer', [
+        transport,
+    ]);
+
+    try {
+        await cb(logger);
+    } finally {
+        await transport.flush();
+    }
+}
 
 type IndexSyncJob = {
     jobId: string;
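
Note: the transport added in this hunk buffers entries in memory; nothing touches the database until flush() hands the whole batch to the writer. A minimal usage sketch under that assumption — it presumes the DatabaseTransport class and the two-argument createLogger from this commit are in scope, and swaps the Prisma-backed writer for an illustrative console one:

    const example = async () => {
        const transport = new DatabaseTransport({
            // Stand-in writer; the real one appends to repoIndexingJob.logs.
            writer: async (logs) => console.log(`persisting ${logs.length} entries`),
        });
        const logger = createLogger('example', [transport]);

        logger.info('cloning repository...');   // buffered, not yet persisted
        logger.info('indexing repository...');  // buffered, not yet persisted

        await transport.flush();                // one batched write for both entries
    };
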
@@ -24,6 +104,7 @@ export class IndexSyncer {
     private interval?: NodeJS.Timeout;
     private queue: Queue<IndexSyncJob>;
     private worker: Worker<IndexSyncJob>;
+    private globalLogger: Logger;
 
     constructor(
         private db: PrismaClient,

@@ -31,6 +112,7 @@
         redis: Redis,
         private ctx: AppContext,
     ) {
+        this.globalLogger = createLogger('index-syncer');
         this.queue = new Queue<IndexSyncJob>({
             redis,
             namespace: 'index-sync-queue',

@@ -51,7 +133,7 @@
         this.worker.on('stalled', this.onJobStalled.bind(this));
         this.worker.on('error', async (error) => {
             Sentry.captureException(error);
-            logger.error(`Index syncer worker error.`, error);
+            this.globalLogger.error(`Index syncer worker error.`, error);
         });
 
         // @nocheckin

@@ -151,32 +233,84 @@
         }
     }
 
-    private async runJob(job: ReservedJob<IndexSyncJob>) {
-        const id = job.data.jobId;
-        const { repo } = await this.db.repoIndexingJob.update({
-            where: {
-                id,
-            },
-            data: {
-                status: RepoIndexingJobStatus.IN_PROGRESS,
-            },
-            select: {
-                repo: {
-                    include: {
-                        connections: {
-                            include: {
-                                connection: true,
-                            }
-                        }
-                    }
-                }
-            }
-        });
-
-        await this.syncGitRepository(repo);
-    }
-
-    private async syncGitRepository(repo: RepoWithConnections) {
+    private runJob = async (job: ReservedJob<IndexSyncJob>) =>
+        useScopedLogger(job.data.jobId, this.db, async (logger) => {
+            const id = job.data.jobId;
+
+            const { repo } = await this.db.repoIndexingJob.update({
+                where: {
+                    id,
+                },
+                data: {
+                    status: RepoIndexingJobStatus.IN_PROGRESS,
+                },
+                select: {
+                    repo: {
+                        include: {
+                            connections: {
+                                include: {
+                                    connection: true,
+                                }
+                            }
+                        }
+                    }
+                }
+            });
+
+            await this._syncGitRepository(repo, logger);
+        })
+
+    private onJobCompleted = async (job: Job<IndexSyncJob>) =>
+        useScopedLogger(job.data.jobId, this.db, async (logger) => {
+            const { repo } = await this.db.repoIndexingJob.update({
+                where: { id: job.data.jobId },
+                data: {
+                    status: RepoIndexingJobStatus.COMPLETED,
+                    repo: {
+                        update: {
+                            indexedAt: new Date(),
+                        }
+                    },
+                    completedAt: new Date(),
+                },
+                select: { repo: true }
+            });
+
+            logger.info(`Completed index job ${job.data.jobId} for repo ${repo.name}`);
+        })
+
+    private onJobFailed = (job: Job<IndexSyncJob>) =>
+        useScopedLogger(job.data.jobId, this.db, async (logger) => {
+            const { repo } = await this.db.repoIndexingJob.update({
+                where: { id: job.data.jobId },
+                data: {
+                    status: RepoIndexingJobStatus.FAILED,
+                    completedAt: new Date(),
+                    errorMessage: job.failedReason,
+                },
+                select: { repo: true }
+            });
+
+            logger.error(`Failed index job ${job.data.jobId} for repo ${repo.name}`);
+        })
+
+    private onJobStalled = (jobId: string) =>
+        useScopedLogger(jobId, this.db, async (logger) => {
+            const { repo } = await this.db.repoIndexingJob.update({
+                where: { id: jobId },
+                data: {
+                    status: RepoIndexingJobStatus.FAILED,
+                    completedAt: new Date(),
+                    errorMessage: 'Job stalled',
+                },
+                select: { repo: true }
+            });
+
+            logger.error(`Job ${jobId} stalled for repo ${repo.name}`);
+        })
+
+
+    private _syncGitRepository = async (repo: RepoWithConnections, logger: Logger) => {
         const { path: repoPath, isReadOnly } = getRepoPath(repo, this.ctx);
 
         const metadata = repoMetadataSchema.parse(repo.metadata);
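
All four handlers above share one lifecycle: useScopedLogger builds a job-scoped transport, hands the callback a logger, and flushes in a finally block, so buffered entries reach the job row whether the callback succeeds or throws. A sketch of that contract, assuming the useScopedLogger from this diff, a PrismaClient instance, and an illustrative job id:

    const example = (db: PrismaClient) =>
        useScopedLogger('job-123', db, async (logger) => {
            logger.info('starting sync');     // buffered in memory
            throw new Error('clone failed');  // flush() still runs in the finally
        });                                   // block, then the error propagates
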
@@ -238,51 +372,6 @@
         logger.info(`Indexed ${repo.displayName} in ${indexDuration_s}s`);
     }
 
-    private async onJobCompleted(job: Job<IndexSyncJob>) {
-        const { repo } = await this.db.repoIndexingJob.update({
-            where: { id: job.data.jobId },
-            data: {
-                status: RepoIndexingJobStatus.COMPLETED,
-                repo: {
-                    update: {
-                        indexedAt: new Date(),
-                    }
-                },
-                completedAt: new Date(),
-            },
-            select: { repo: true }
-        });
-
-        logger.info(`Completed index job ${job.data.jobId} for repo ${repo.name}`);
-    }
-
-    private async onJobFailed(job: Job<IndexSyncJob>) {
-        const { repo } = await this.db.repoIndexingJob.update({
-            where: { id: job.data.jobId },
-            data: {
-                status: RepoIndexingJobStatus.FAILED,
-                completedAt: new Date(),
-                errorMessage: job.failedReason,
-            },
-            select: { repo: true}
-        });
-
-        logger.error(`Failed index job ${job.data.jobId} for repo ${repo.name}`);
-    }
-
-    private async onJobStalled(jobId: string) {
-        const { repo } = await this.db.repoIndexingJob.update({
-            where: { id: jobId },
-            data: {
-                status: RepoIndexingJobStatus.FAILED,
-                completedAt: new Date(),
-                errorMessage: 'Job stalled',
-            },
-            select: { repo: true }
-        });
-
-        logger.error(`Job ${jobId} stalled for repo ${repo.name}`);
-    }
 
     public dispose() {
         if (this.interval) {

@@ -91,6 +91,7 @@ model RepoIndexingJob {
   completedAt DateTime?
 
   errorMessage String?
+  logs Json?
 
   repo Repo @relation(fields: [repoId], references: [id], onDelete: Cascade)
   repoId Int
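
The new logs Json? column stores the appended entries as untyped JSON, so readers have to narrow the value themselves. A hedged sketch of reading a job's entries back — getJobLogs is a hypothetical helper, not part of this commit, and it assumes the { message } shape written by DatabaseTransport:

    import { PrismaClient } from '@sourcebot/db';

    // Hypothetical reader for the logs column added above.
    const getJobLogs = async (db: PrismaClient, jobId: string): Promise<{ message: string }[]> => {
        const job = await db.repoIndexingJob.findUnique({
            where: { id: jobId },
            select: { logs: true },
        });
        // Prisma types Json columns as JsonValue; narrow to the stored shape.
        return (job?.logs ?? []) as unknown as { message: string }[];
    };
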
@@ -15,6 +15,7 @@
     "dotenv": "^16.4.5",
     "triple-beam": "^1.4.1",
     "winston": "^3.15.0",
+    "winston-transport": "^4.9.0",
     "zod": "^3.24.3"
   },
   "devDependencies": {

@@ -1,4 +1,5 @@
-import winston, { format } from 'winston';
+import winston, { format, Logger } from 'winston';
+import Transport, { TransportStreamOptions } from 'winston-transport';
 import { Logtail } from '@logtail/node';
 import { LogtailTransport } from '@logtail/winston';
 import { MESSAGE } from 'triple-beam';

@@ -40,7 +41,7 @@ const humanReadableFormat = printf(({ level, message, timestamp, stack, label: _
     return `${timestamp} ${level}: ${label}${message}`;
 });
 
-const createLogger = (label: string) => {
+const createLogger = (label: string, transports: Transport[] = []) => {
     const isStructuredLoggingEnabled = env.SOURCEBOT_STRUCTURED_LOGGING_ENABLED === 'true';
 
     return winston.createLogger({

@@ -78,10 +79,17 @@ const createLogger = (label: string) => {
                 })
             )
         ] : []),
+        ...transports,
         ]
     });
 }
 
 export {
-    createLogger
+    createLogger,
+    Transport,
+    Logger
 };
+
+export type {
+    TransportStreamOptions,
+}
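
With this change, callers can append their own winston transports after the built-in console/Logtail ones. A usage sketch relying only on the exports added here; the NoopTransport subclass is illustrative:

    import { createLogger, Transport } from '@sourcebot/logger';

    // Any winston-transport subclass can now be attached per logger instance.
    class NoopTransport extends Transport {
        log(_info: any, callback: () => void) {
            callback(); // accept and drop the entry
        }
    }

    const logger = createLogger('example', [new NoopTransport()]);
    logger.info('goes to the default transports, plus NoopTransport');
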
@@ -7684,6 +7684,7 @@ __metadata:
     triple-beam: "npm:^1.4.1"
     typescript: "npm:^5.7.3"
     winston: "npm:^3.15.0"
+    winston-transport: "npm:^4.9.0"
     zod: "npm:^3.24.3"
   languageName: unknown
   linkType: soft