Mirror of https://github.com/sourcebot-dev/sourcebot.git (synced 2025-12-12 12:25:22 +00:00)
Add support for structured logs (#323)
* wip on refactoring docs
* wip
* initial structured logs impl
* structured log docs
* create logger package
* add news entry for structured logging
* add logger package to dockerfile and cleanup
* add gh workflow for catching broken links
* further wip
* fix
* further wip on docs
* review feedback
* remove logger dep from mcp package
* fix build errors
* add back auth_url warning
* fix sidebar title consistency

---------

Co-authored-by: bkellam <bshizzle1234@gmail.com>
parent 82a786a1d4
commit 3b36ffa17e

57 changed files with 490 additions and 222 deletions
@@ -42,11 +42,13 @@ COPY ./packages/db ./packages/db
 COPY ./packages/schemas ./packages/schemas
 COPY ./packages/crypto ./packages/crypto
 COPY ./packages/error ./packages/error
+COPY ./packages/logger ./packages/logger

 RUN yarn workspace @sourcebot/db install
 RUN yarn workspace @sourcebot/schemas install
 RUN yarn workspace @sourcebot/crypto install
 RUN yarn workspace @sourcebot/error install
+RUN yarn workspace @sourcebot/logger install

 # ------------------------------------
 # ------ Build Web ------

@@ -89,6 +91,7 @@ COPY --from=shared-libs-builder /app/packages/db ./packages/db
 COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas
 COPY --from=shared-libs-builder /app/packages/crypto ./packages/crypto
 COPY --from=shared-libs-builder /app/packages/error ./packages/error
+COPY --from=shared-libs-builder /app/packages/logger ./packages/logger

 # Fixes arm64 timeouts
 RUN yarn workspace @sourcebot/web install

@@ -128,6 +131,7 @@ COPY --from=shared-libs-builder /app/packages/db ./packages/db
 COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas
 COPY --from=shared-libs-builder /app/packages/crypto ./packages/crypto
 COPY --from=shared-libs-builder /app/packages/error ./packages/error
+COPY --from=shared-libs-builder /app/packages/logger ./packages/logger
 RUN yarn workspace @sourcebot/backend install
 RUN yarn workspace @sourcebot/backend build

@@ -209,6 +213,7 @@ COPY --from=shared-libs-builder /app/packages/db ./packages/db
 COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas
 COPY --from=shared-libs-builder /app/packages/crypto ./packages/crypto
 COPY --from=shared-libs-builder /app/packages/error ./packages/error
+COPY --from=shared-libs-builder /app/packages/logger ./packages/logger

 # Configure dependencies
 RUN apk add --no-cache git ca-certificates bind-tools tini jansson wget supervisor uuidgen curl perl jq redis postgresql postgresql-contrib openssl util-linux unzip

@@ -74,7 +74,8 @@
         "docs/configuration/auth/roles-and-permissions"
       ]
     },
-    "docs/configuration/transactional-emails"
+    "docs/configuration/transactional-emails",
+    "docs/configuration/structured-logging"
   ]
 },
 {

@@ -2,6 +2,8 @@
 title: Overview
 ---

+<Warning>If you're deploying Sourcebot behind a domain, you must set the [AUTH_URL](/docs/configuration/environment-variables) environment variable.</Warning>
+
 Sourcebot has built-in authentication that gates access to your organization. OAuth, email codes, and email / password are supported.

 The first account that's registered on a Sourcebot deployment is made the owner. All other users who register must be [approved](/docs/configuration/auth/overview#approving-new-members) by the owner.

@@ -40,8 +42,6 @@ See [transactional emails](/docs/configuration/transactional-emails) for more de

 ## Enterprise Authentication Providers

-<Warning>If you're deploying Sourcebot behind a domain, you must set the [AUTH_URL](/docs/configuration/environment-variables) environment variable to use these providers.</Warning>
-
 The following authentication providers require an [enterprise license](/docs/license-key) to be enabled.

 By default, a new user registering using these providers must have their join request accepted by the owner of the organization to join. To allow a user to join automatically when

@@ -1,5 +1,6 @@
 ---
 title: Roles and Permissions
+sidebarTitle: Roles and permissions
 ---

 <Note>Looking to sync permissions with your identity provider? We're working on it - [reach out](https://www.sourcebot.dev/contact) to us to learn more</Note>

@@ -27,6 +27,8 @@ The following environment variables allow you to configure your Sourcebot deploy
 | `SMTP_CONNECTION_URL` | `-` | <p>The url to the SMTP service used for sending transactional emails. See [this doc](/docs/configuration/transactional-emails) for more info.</p> |
 | `SOURCEBOT_ENCRYPTION_KEY` | Automatically generated at startup if no value is provided. Generated using `openssl rand -base64 24` | <p>Used to encrypt connection secrets and generate API keys.</p> |
 | `SOURCEBOT_LOG_LEVEL` | `info` | <p>The Sourcebot logging level. Valid values are `debug`, `info`, `warn`, `error`, in order of severity.</p> |
+| `SOURCEBOT_STRUCTURED_LOGGING_ENABLED` | `false` | <p>Enables/disables structured JSON logging. See [this doc](/docs/configuration/structured-logging) for more info.</p> |
+| `SOURCEBOT_STRUCTURED_LOGGING_FILE` | - | <p>Optional file to log to if structured logging is enabled.</p> |
 | `SOURCEBOT_TELEMETRY_DISABLED` | `false` | <p>Enables/disables telemetry collection in Sourcebot. See [this doc](/docs/overview.mdx#telemetry) for more info.</p> |
 | `TOTAL_MAX_MATCH_COUNT` | `100000` | <p>The maximum number of matches per query</p> |
 | `ZOEKT_MAX_WALL_TIME_MS` | `10000` | <p>The maximum real world duration (in milliseconds) per zoekt query</p> |

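Because each structured log line is a standalone JSON object, a file written via `SOURCEBOT_STRUCTURED_LOGGING_FILE` can be consumed line by line. A minimal TypeScript sketch, assuming one JSON record per line; the path is illustrative, matching the example in the structured-logging docs added below:

```typescript
import { createReadStream } from 'node:fs';
import { createInterface } from 'node:readline';

// Illustrative path; matches the example used in the docs.
const LOG_FILE = '/data/sourcebot.log';

const rl = createInterface({ input: createReadStream(LOG_FILE) });

rl.on('line', (line) => {
    // Each line is assumed to be one JSON log record.
    const record = JSON.parse(line) as { level: string; service: string; message: string };
    if (record.level === 'error') {
        console.error(`[${record.service}] ${record.message}`);
    }
});
```
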
docs/docs/configuration/structured-logging.mdx (new file, +39)

@@ -0,0 +1,39 @@
+---
+title: Structured logging
+---
+
+By default, Sourcebot will output logs to the console in a human-readable format. If you'd like Sourcebot to output structured JSON logs, set the following env vars:
+
+- `SOURCEBOT_STRUCTURED_LOGGING_ENABLED` (default: `false`): Controls whether logs are in a structured JSON format
+- `SOURCEBOT_STRUCTURED_LOGGING_FILE`: If structured logging is enabled and this env var is set, structured logs will be written to this file (ex. `/data/sourcebot.log`)
+
+### Structured log schema
+```json
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "SourcebotLog",
+  "properties": {
+    "level": {
+      "type": "string",
+      "description": "The log level (error, warning, info, debug)"
+    },
+    "service": {
+      "type": "string",
+      "description": "The Sourcebot component that generated the log"
+    },
+    "message": {
+      "type": "string",
+      "description": "The log message"
+    },
+    "status": {
+      "type": "string",
+      "description": "The same value as the level field, added for Datadog support"
+    },
+    "timestamp": {
+      "type": "string",
+      "description": "The timestamp of the log in ISO 8601 format"
+    }
+  }
+}
+```

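The schema above maps directly onto a TypeScript type. The record below is hypothetical but conforms to the documented fields; the `service` value matches the `backend-main` label introduced elsewhere in this commit:

```typescript
// Type mirroring the SourcebotLog schema above.
interface SourcebotLog {
    level: 'error' | 'warning' | 'info' | 'debug';
    service: string;   // the Sourcebot component that generated the log
    message: string;
    status: string;    // duplicates `level`; added for Datadog support
    timestamp: string; // ISO 8601
}

// A hypothetical record conforming to the schema.
const example: SourcebotLog = {
    level: 'info',
    service: 'backend-main',
    message: 'Connected to redis',
    status: 'info',
    timestamp: '2025-01-01T00:00:00.000Z',
};
```
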
@@ -43,7 +43,7 @@ A JSON configuration file is used to specify connections. For example:

 Configuration files must conform to the [JSON schema](#schema-reference).

-When running Sourcebot, this file must be mounted in a volume that is accessible to the container, with it's path specified in the `CONFIG_PATH` environment variable. For example:
+When running Sourcebot, this file must be mounted in a volume that is accessible to the container, with its path specified in the `CONFIG_PATH` environment variable. For example:

 ```bash
 docker run \

@@ -53,6 +53,9 @@ Watch this 1:51 minute video to get a quick overview of how to deploy Sourcebot
 </Step>

 <Step title="Launch your instance">
+<Warning>If you're deploying Sourcebot behind a domain, you must set the [AUTH_URL](/docs/configuration/environment-variables) environment variable.</Warning>
+
+
 In the same directory as `config.json`, run the following command to start your instance:

 ``` bash

@@ -1,6 +1,6 @@
 ---
 title: AI Code Review Agent
-sidebarTitle: AI Code Review Agent
+sidebarTitle: AI code review agent
 ---

 <Note>

@@ -23,8 +23,6 @@
   },
   "dependencies": {
     "@gitbeaker/rest": "^40.5.1",
-    "@logtail/node": "^0.5.2",
-    "@logtail/winston": "^0.5.2",
     "@octokit/rest": "^21.0.2",
     "@sentry/cli": "^2.42.2",
     "@sentry/node": "^9.3.0",

@@ -32,6 +30,7 @@
     "@sourcebot/crypto": "workspace:*",
     "@sourcebot/db": "workspace:*",
     "@sourcebot/error": "workspace:*",
+    "@sourcebot/logger": "workspace:*",
     "@sourcebot/schemas": "workspace:*",
     "@t3-oss/env-core": "^0.12.0",
     "@types/express": "^5.0.0",

@@ -51,7 +50,6 @@
     "prom-client": "^15.1.3",
     "simple-git": "^3.27.0",
     "strip-json-comments": "^5.0.1",
-    "winston": "^3.15.0",
     "zod": "^3.24.3"
   }
 }

@@ -2,7 +2,7 @@ import { createBitbucketCloudClient } from "@coderabbitai/bitbucket/cloud";
 import { createBitbucketServerClient } from "@coderabbitai/bitbucket/server";
 import { BitbucketConnectionConfig } from "@sourcebot/schemas/v3/bitbucket.type";
 import type { ClientOptions, ClientPathsWithMethod } from "openapi-fetch";
-import { createLogger } from "./logger.js";
+import { createLogger } from "@sourcebot/logger";
 import { PrismaClient } from "@sourcebot/db";
 import { getTokenFromConfig, measure, fetchWithRetry } from "./utils.js";
 import * as Sentry from "@sentry/node";

@@ -13,7 +13,7 @@ import { SchemaRestRepository as ServerRepository } from "@coderabbitai/bitbucke
 import { processPromiseResults } from "./connectionUtils.js";
 import { throwIfAnyFailed } from "./connectionUtils.js";

-const logger = createLogger("Bitbucket");
+const logger = createLogger('bitbucket');
 const BITBUCKET_CLOUD_GIT = 'https://bitbucket.org';
 const BITBUCKET_CLOUD_API = 'https://api.bitbucket.org/2.0';
 const BITBUCKET_CLOUD = "cloud";

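The same two-line change repeats across the backend modules below: import `createLogger` from the shared package instead of the local `./logger.js`, and pass a lowercase, kebab-case service name. A usage sketch, assuming the package keeps the winston-style `info`/`warn`/`error`/`debug` methods seen throughout this diff:

```typescript
import { createLogger } from "@sourcebot/logger";

// One module-level logger per file; the label becomes the
// `service` field when structured logging is enabled.
const logger = createLogger('bitbucket');

logger.info('Syncing Bitbucket connection...');
logger.error('Failed to fetch repositories');
```
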
@@ -2,7 +2,7 @@ import { Connection, ConnectionSyncStatus, PrismaClient, Prisma } from "@sourceb
 import { Job, Queue, Worker } from 'bullmq';
 import { Settings } from "./types.js";
 import { ConnectionConfig } from "@sourcebot/schemas/v3/connection.type";
-import { createLogger } from "./logger.js";
+import { createLogger } from "@sourcebot/logger";
 import { Redis } from 'ioredis';
 import { RepoData, compileGithubConfig, compileGitlabConfig, compileGiteaConfig, compileGerritConfig, compileBitbucketConfig, compileGenericGitHostConfig } from "./repoCompileUtils.js";
 import { BackendError, BackendException } from "@sourcebot/error";

@@ -32,7 +32,7 @@ type JobResult = {
 export class ConnectionManager implements IConnectionManager {
     private worker: Worker;
     private queue: Queue<JobPayload>;
-    private logger = createLogger('ConnectionManager');
+    private logger = createLogger('connection-manager');

     constructor(
         private db: PrismaClient,

@@ -22,7 +22,6 @@ dotenv.config({
 export const env = createEnv({
     server: {
         SOURCEBOT_ENCRYPTION_KEY: z.string(),
-        SOURCEBOT_LOG_LEVEL: z.enum(["info", "debug", "warn", "error"]).default("info"),
         SOURCEBOT_TELEMETRY_DISABLED: booleanSchema.default("false"),
         SOURCEBOT_INSTALL_ID: z.string().default("unknown"),
         NEXT_PUBLIC_SOURCEBOT_VERSION: z.string().default("unknown"),

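`SOURCEBOT_LOG_LEVEL` is dropped from the backend's env schema, presumably because the new logger package now reads it (together with the structured-logging variables) itself. A sketch of what that package-side schema could look like, mirroring the `createEnv`/zod style used here; the exact shape is an assumption, since the logger package's source isn't part of this diff:

```typescript
import { createEnv } from "@t3-oss/env-core";
import { z } from "zod";

// Assumed boolean coercion, analogous to the backend's booleanSchema.
const booleanSchema = z.enum(["true", "false"]).transform((v) => v === "true");

// Hypothetical env schema inside @sourcebot/logger.
export const env = createEnv({
    server: {
        SOURCEBOT_LOG_LEVEL: z.enum(["info", "debug", "warn", "error"]).default("info"),
        SOURCEBOT_STRUCTURED_LOGGING_ENABLED: booleanSchema.default("false"),
        SOURCEBOT_STRUCTURED_LOGGING_FILE: z.string().optional(),
    },
    runtimeEnv: process.env,
    emptyStringAsUndefined: true,
});
```
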
@@ -1,6 +1,6 @@
 import fetch from 'cross-fetch';
 import { GerritConnectionConfig } from "@sourcebot/schemas/v3/index.type"
-import { createLogger } from './logger.js';
+import { createLogger } from '@sourcebot/logger';
 import micromatch from "micromatch";
 import { measure, fetchWithRetry } from './utils.js';
 import { BackendError } from '@sourcebot/error';

@@ -33,7 +33,7 @@ interface GerritWebLink {
     url: string;
 }

-const logger = createLogger('Gerrit');
+const logger = createLogger('gerrit');

 export const getGerritReposFromConfig = async (config: GerritConnectionConfig): Promise<GerritProject[]> => {
     const url = config.url.endsWith('/') ? config.url : `${config.url}/`;

@@ -95,7 +95,7 @@ const fetchAllProjects = async (url: string): Promise<GerritProject[]> => {
     try {
         response = await fetch(endpointWithParams);
         if (!response.ok) {
-            console.log(`Failed to fetch projects from Gerrit at ${endpointWithParams} with status ${response.status}`);
+            logger.error(`Failed to fetch projects from Gerrit at ${endpointWithParams} with status ${response.status}`);
             const e = new BackendException(BackendError.CONNECTION_SYNC_FAILED_TO_FETCH_GERRIT_PROJECTS, {
                 status: response.status,
             });

@@ -109,7 +109,7 @@ const fetchAllProjects = async (url: string): Promise<GerritProject[]> => {
         }

         const status = (err as any).code;
-        console.log(`Failed to fetch projects from Gerrit at ${endpointWithParams} with status ${status}`);
+        logger.error(`Failed to fetch projects from Gerrit at ${endpointWithParams} with status ${status}`);
         throw new BackendException(BackendError.CONNECTION_SYNC_FAILED_TO_FETCH_GERRIT_PROJECTS, {
             status: status,
         });

@@ -2,14 +2,14 @@ import { Api, giteaApi, HttpResponse, Repository as GiteaRepository } from 'gite
 import { GiteaConnectionConfig } from '@sourcebot/schemas/v3/gitea.type';
 import { getTokenFromConfig, measure } from './utils.js';
 import fetch from 'cross-fetch';
-import { createLogger } from './logger.js';
+import { createLogger } from '@sourcebot/logger';
 import micromatch from 'micromatch';
 import { PrismaClient } from '@sourcebot/db';
 import { processPromiseResults, throwIfAnyFailed } from './connectionUtils.js';
 import * as Sentry from "@sentry/node";
 import { env } from './env.js';

-const logger = createLogger('Gitea');
+const logger = createLogger('gitea');
 const GITEA_CLOUD_HOSTNAME = "gitea.com";

 export const getGiteaReposFromConfig = async (config: GiteaConnectionConfig, orgId: number, db: PrismaClient) => {

@@ -1,6 +1,6 @@
 import { Octokit } from "@octokit/rest";
 import { GithubConnectionConfig } from "@sourcebot/schemas/v3/github.type";
-import { createLogger } from "./logger.js";
+import { createLogger } from "@sourcebot/logger";
 import { getTokenFromConfig, measure, fetchWithRetry } from "./utils.js";
 import micromatch from "micromatch";
 import { PrismaClient } from "@sourcebot/db";

@@ -9,7 +9,7 @@ import { processPromiseResults, throwIfAnyFailed } from "./connectionUtils.js";
 import * as Sentry from "@sentry/node";
 import { env } from "./env.js";

-const logger = createLogger("GitHub");
+const logger = createLogger('github');
 const GITHUB_CLOUD_HOSTNAME = "github.com";

 export type OctokitRepository = {

@@ -1,6 +1,6 @@
 import { Gitlab, ProjectSchema } from "@gitbeaker/rest";
 import micromatch from "micromatch";
-import { createLogger } from "./logger.js";
+import { createLogger } from "@sourcebot/logger";
 import { GitlabConnectionConfig } from "@sourcebot/schemas/v3/gitlab.type"
 import { getTokenFromConfig, measure, fetchWithRetry } from "./utils.js";
 import { PrismaClient } from "@sourcebot/db";

@@ -8,7 +8,7 @@ import { processPromiseResults, throwIfAnyFailed } from "./connectionUtils.js";
 import * as Sentry from "@sentry/node";
 import { env } from "./env.js";

-const logger = createLogger("GitLab");
+const logger = createLogger('gitlab');
 export const GITLAB_CLOUD_HOSTNAME = "gitlab.com";

 export const getGitLabReposFromConfig = async (config: GitlabConnectionConfig, orgId: number, db: PrismaClient) => {

@@ -8,31 +8,34 @@ import { AppContext } from "./types.js";
 import { main } from "./main.js"
 import { PrismaClient } from "@sourcebot/db";
 import { env } from "./env.js";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('index');

 // Register handler for normal exit
 process.on('exit', (code) => {
-    console.log(`Process is exiting with code: ${code}`);
+    logger.info(`Process is exiting with code: ${code}`);
 });

 // Register handlers for abnormal terminations
 process.on('SIGINT', () => {
-    console.log('Process interrupted (SIGINT)');
-    process.exit(130);
+    logger.info('Process interrupted (SIGINT)');
+    process.exit(0);
 });

 process.on('SIGTERM', () => {
-    console.log('Process terminated (SIGTERM)');
-    process.exit(143);
+    logger.info('Process terminated (SIGTERM)');
+    process.exit(0);
 });

 // Register handlers for uncaught exceptions and unhandled rejections
 process.on('uncaughtException', (err) => {
-    console.log(`Uncaught exception: ${err.message}`);
+    logger.error(`Uncaught exception: ${err.message}`);
     process.exit(1);
 });

 process.on('unhandledRejection', (reason, promise) => {
-    console.log(`Unhandled rejection at: ${promise}, reason: ${reason}`);
+    logger.error(`Unhandled rejection at: ${promise}, reason: ${reason}`);
     process.exit(1);
 });

@@ -60,12 +63,12 @@ main(prisma, context)
         await prisma.$disconnect();
     })
     .catch(async (e) => {
-        console.error(e);
+        logger.error(e);
         Sentry.captureException(e);

         await prisma.$disconnect();
         process.exit(1);
     })
     .finally(() => {
-        console.log("Shutting down...");
+        logger.info("Shutting down...");
     });

@@ -1,5 +1,8 @@
 import * as Sentry from "@sentry/node";
 import { env } from "./env.js";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('instrument');

 if (!!env.NEXT_PUBLIC_SENTRY_BACKEND_DSN && !!env.NEXT_PUBLIC_SENTRY_ENVIRONMENT) {
     Sentry.init({

@@ -8,5 +11,5 @@ if (!!env.NEXT_PUBLIC_SENTRY_BACKEND_DSN && !!env.NEXT_PUBLIC_SENTRY_ENVIRONMENT
         environment: env.NEXT_PUBLIC_SENTRY_ENVIRONMENT,
     });
 } else {
-    console.debug("Sentry was not initialized");
+    logger.debug("Sentry was not initialized");
 }

@@ -1,47 +0,0 @@
-import winston, { format } from 'winston';
-import { Logtail } from '@logtail/node';
-import { LogtailTransport } from '@logtail/winston';
-import { env } from './env.js';
-
-const { combine, colorize, timestamp, prettyPrint, errors, printf, label: labelFn } = format;
-
-
-const createLogger = (label: string) => {
-    return winston.createLogger({
-        level: env.SOURCEBOT_LOG_LEVEL,
-        format: combine(
-            errors({ stack: true }),
-            timestamp(),
-            prettyPrint(),
-            labelFn({
-                label: label,
-            })
-        ),
-        transports: [
-            new winston.transports.Console({
-                format: combine(
-                    errors({ stack: true }),
-                    colorize(),
-                    printf(({ level, message, timestamp, stack, label: _label }) => {
-                        const label = `[${_label}] `;
-                        if (stack) {
-                            return `${timestamp} ${level}: ${label}${message}\n${stack}`;
-                        }
-                        return `${timestamp} ${level}: ${label}${message}`;
-                    }),
-                ),
-            }),
-            ...(env.LOGTAIL_TOKEN && env.LOGTAIL_HOST ? [
-                new LogtailTransport(
-                    new Logtail(env.LOGTAIL_TOKEN, {
-                        endpoint: env.LOGTAIL_HOST,
-                    })
-                )
-            ] : []),
-        ]
-    });
-}
-
-export {
-    createLogger
-};

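The backend-local winston logger above is deleted in favor of the shared `@sourcebot/logger` package, whose source is not part of this diff. A minimal sketch of what the replacement factory could look like, assuming it keeps winston and honors the env vars documented earlier; the structured format emits the `service`/`status`/`timestamp` fields from the documented schema, and everything else here is conjecture:

```typescript
import winston, { format } from 'winston';

// Assumed env plumbing; the real package presumably validates these itself.
const level = process.env.SOURCEBOT_LOG_LEVEL ?? 'info';
const structured = process.env.SOURCEBOT_STRUCTURED_LOGGING_ENABLED === 'true';
const logFile = process.env.SOURCEBOT_STRUCTURED_LOGGING_FILE;

export const createLogger = (service: string) => {
    // Structured mode: one JSON object per line with the documented fields.
    const jsonFormat = format.combine(
        format.errors({ stack: true }),
        format.timestamp(),
        format((info) => {
            info.service = service;
            info.status = info.level; // duplicate of level, for Datadog
            return info;
        })(),
        format.json(),
    );

    // Pretty mode: the human-readable console output used by default.
    const prettyFormat = format.combine(
        format.errors({ stack: true }),
        format.timestamp(),
        format.colorize(),
        format.printf(({ timestamp, level, message }) =>
            `${timestamp} ${level}: [${service}] ${message}`),
    );

    return winston.createLogger({
        level,
        format: structured ? jsonFormat : prettyFormat,
        transports: [
            new winston.transports.Console(),
            // Optionally mirror structured logs to a file.
            ...(structured && logFile
                ? [new winston.transports.File({ filename: logFile, format: jsonFormat })]
                : []),
        ],
    });
};
```
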
@@ -1,5 +1,5 @@
 import { PrismaClient } from '@sourcebot/db';
-import { createLogger } from "./logger.js";
+import { createLogger } from "@sourcebot/logger";
 import { AppContext } from "./types.js";
 import { DEFAULT_SETTINGS } from './constants.js';
 import { Redis } from 'ioredis';

@@ -14,7 +14,7 @@ import { SourcebotConfig } from '@sourcebot/schemas/v3/index.type';
 import { indexSchema } from '@sourcebot/schemas/v3/index.schema';
 import { Ajv } from "ajv";

-const logger = createLogger('main');
+const logger = createLogger('backend-main');
 const ajv = new Ajv({
     validateFormats: false,
 });

@@ -56,7 +56,7 @@ export const main = async (db: PrismaClient, context: AppContext) => {
     logger.info('Connected to redis');
 }).catch((err: unknown) => {
     logger.error('Failed to connect to redis');
-    console.error(err);
+    logger.error(err);
     process.exit(1);
 });

@@ -1,5 +1,8 @@
 import express, { Request, Response } from 'express';
 import client, { Registry, Counter, Gauge } from 'prom-client';
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('prometheus-client');

 export class PromClient {
     private registry: Registry;

@@ -96,7 +99,7 @@ export class PromClient {
         });

         this.app.listen(this.PORT, () => {
-            console.log(`Prometheus metrics server is running on port ${this.PORT}`);
+            logger.info(`Prometheus metrics server is running on port ${this.PORT}`);
         });
     }

@@ -9,7 +9,7 @@ import { SchemaRepository as BitbucketCloudRepository } from "@coderabbitai/bitb
 import { Prisma, PrismaClient } from '@sourcebot/db';
 import { WithRequired } from "./types.js"
 import { marshalBool } from "./utils.js";
-import { createLogger } from './logger.js';
+import { createLogger } from '@sourcebot/logger';
 import { BitbucketConnectionConfig, GerritConnectionConfig, GiteaConnectionConfig, GitlabConnectionConfig, GenericGitHostConnectionConfig } from '@sourcebot/schemas/v3/connection.type';
 import { RepoMetadata } from './types.js';
 import path from 'path';

@@ -20,7 +20,7 @@ import GitUrlParse from 'git-url-parse';

 export type RepoData = WithRequired<Prisma.RepoCreateInput, 'connections'>;

-const logger = createLogger('RepoCompileUtils');
+const logger = createLogger('repo-compile-utils');

 export const compileGithubConfig = async (
     config: GithubConnectionConfig,

@@ -1,6 +1,6 @@
 import { Job, Queue, Worker } from 'bullmq';
 import { Redis } from 'ioredis';
-import { createLogger } from "./logger.js";
+import { createLogger } from "@sourcebot/logger";
 import { Connection, PrismaClient, Repo, RepoToConnection, RepoIndexingStatus, StripeSubscriptionStatus } from "@sourcebot/db";
 import { GithubConnectionConfig, GitlabConnectionConfig, GiteaConnectionConfig, BitbucketConnectionConfig } from '@sourcebot/schemas/v3/connection.type';
 import { AppContext, Settings, repoMetadataSchema } from "./types.js";

@@ -28,12 +28,13 @@ type RepoGarbageCollectionPayload = {
     repo: Repo,
 }

+const logger = createLogger('repo-manager');
+
 export class RepoManager implements IRepoManager {
     private indexWorker: Worker;
     private indexQueue: Queue<RepoIndexingPayload>;
     private gcWorker: Worker;
     private gcQueue: Queue<RepoGarbageCollectionPayload>;
-    private logger = createLogger('RepoManager');

     constructor(
         private db: PrismaClient,

@@ -113,12 +114,12 @@ export class RepoManager implements IRepoManager {
                 this.promClient.pendingRepoIndexingJobs.inc({ repo: repo.id.toString() });
             });

-            this.logger.info(`Added ${orgRepos.length} jobs to indexQueue for org ${orgId} with priority ${priority}`);
+            logger.info(`Added ${orgRepos.length} jobs to indexQueue for org ${orgId} with priority ${priority}`);
         }

         }).catch((err: unknown) => {
-            this.logger.error(`Failed to add jobs to indexQueue for repos ${repos.map(repo => repo.id).join(', ')}: ${err}`);
+            logger.error(`Failed to add jobs to indexQueue for repos ${repos.map(repo => repo.id).join(', ')}: ${err}`);
         });
     }

@@ -176,7 +177,7 @@ export class RepoManager implements IRepoManager {
         if (connection.connectionType === 'github') {
             const config = connection.config as unknown as GithubConnectionConfig;
             if (config.token) {
-                const token = await getTokenFromConfig(config.token, connection.orgId, db, this.logger);
+                const token = await getTokenFromConfig(config.token, connection.orgId, db, logger);
                 return {
                     password: token,
                 }

@@ -186,7 +187,7 @@ export class RepoManager implements IRepoManager {
         else if (connection.connectionType === 'gitlab') {
             const config = connection.config as unknown as GitlabConnectionConfig;
             if (config.token) {
-                const token = await getTokenFromConfig(config.token, connection.orgId, db, this.logger);
+                const token = await getTokenFromConfig(config.token, connection.orgId, db, logger);
                 return {
                     username: 'oauth2',
                     password: token,

@@ -197,7 +198,7 @@ export class RepoManager implements IRepoManager {
         else if (connection.connectionType === 'gitea') {
             const config = connection.config as unknown as GiteaConnectionConfig;
             if (config.token) {
-                const token = await getTokenFromConfig(config.token, connection.orgId, db, this.logger);
+                const token = await getTokenFromConfig(config.token, connection.orgId, db, logger);
                 return {
                     password: token,
                 }

@@ -207,7 +208,7 @@ export class RepoManager implements IRepoManager {
         else if (connection.connectionType === 'bitbucket') {
             const config = connection.config as unknown as BitbucketConnectionConfig;
             if (config.token) {
-                const token = await getTokenFromConfig(config.token, connection.orgId, db, this.logger);
+                const token = await getTokenFromConfig(config.token, connection.orgId, db, logger);
                 const username = config.user ?? 'x-token-auth';
                 return {
                     username,

@@ -228,23 +229,23 @@ export class RepoManager implements IRepoManager {
         // If the repo was already in the indexing state, this job was likely killed and picked up again. As a result,
         // to ensure the repo state is valid, we delete the repo if it exists so we get a fresh clone
         if (repoAlreadyInIndexingState && existsSync(repoPath) && !isReadOnly) {
-            this.logger.info(`Deleting repo directory ${repoPath} during sync because it was already in the indexing state`);
+            logger.info(`Deleting repo directory ${repoPath} during sync because it was already in the indexing state`);
             await promises.rm(repoPath, { recursive: true, force: true });
         }

         if (existsSync(repoPath) && !isReadOnly) {
-            this.logger.info(`Fetching ${repo.displayName}...`);
+            logger.info(`Fetching ${repo.displayName}...`);

             const { durationMs } = await measure(() => fetchRepository(repoPath, ({ method, stage, progress }) => {
-                this.logger.debug(`git.${method} ${stage} stage ${progress}% complete for ${repo.displayName}`)
+                logger.debug(`git.${method} ${stage} stage ${progress}% complete for ${repo.displayName}`)
             }));
             const fetchDuration_s = durationMs / 1000;

             process.stdout.write('\n');
-            this.logger.info(`Fetched ${repo.displayName} in ${fetchDuration_s}s`);
+            logger.info(`Fetched ${repo.displayName} in ${fetchDuration_s}s`);

         } else if (!isReadOnly) {
-            this.logger.info(`Cloning ${repo.displayName}...`);
+            logger.info(`Cloning ${repo.displayName}...`);

             const auth = await this.getCloneCredentialsForRepo(repo, this.db);
             const cloneUrl = new URL(repo.cloneUrl);

@@ -263,12 +264,12 @@ export class RepoManager implements IRepoManager {
             }

             const { durationMs } = await measure(() => cloneRepository(cloneUrl.toString(), repoPath, ({ method, stage, progress }) => {
-                this.logger.debug(`git.${method} ${stage} stage ${progress}% complete for ${repo.displayName}`)
+                logger.debug(`git.${method} ${stage} stage ${progress}% complete for ${repo.displayName}`)
             }));
             const cloneDuration_s = durationMs / 1000;

             process.stdout.write('\n');
-            this.logger.info(`Cloned ${repo.displayName} in ${cloneDuration_s}s`);
+            logger.info(`Cloned ${repo.displayName} in ${cloneDuration_s}s`);
         }

         // Regardless of clone or fetch, always upsert the git config for the repo.

@@ -278,14 +279,14 @@ export class RepoManager implements IRepoManager {
             await upsertGitConfig(repoPath, metadata.gitConfig);
         }

-        this.logger.info(`Indexing ${repo.displayName}...`);
+        logger.info(`Indexing ${repo.displayName}...`);
         const { durationMs } = await measure(() => indexGitRepository(repo, this.settings, this.ctx));
         const indexDuration_s = durationMs / 1000;
-        this.logger.info(`Indexed ${repo.displayName} in ${indexDuration_s}s`);
+        logger.info(`Indexed ${repo.displayName} in ${indexDuration_s}s`);
     }

     private async runIndexJob(job: Job<RepoIndexingPayload>) {
-        this.logger.info(`Running index job (id: ${job.id}) for repo ${job.data.repo.displayName}`);
+        logger.info(`Running index job (id: ${job.id}) for repo ${job.data.repo.displayName}`);
         const repo = job.data.repo as RepoWithConnections;

         // We have to use the existing repo object to get the repoIndexingStatus because the repo object

@@ -296,7 +297,7 @@ export class RepoManager implements IRepoManager {
             },
         });
         if (!existingRepo) {
-            this.logger.error(`Repo ${repo.id} not found`);
+            logger.error(`Repo ${repo.id} not found`);
             const e = new Error(`Repo ${repo.id} not found`);
             Sentry.captureException(e);
             throw e;

@@ -328,19 +329,19 @@ export class RepoManager implements IRepoManager {
                 attempts++;
                 this.promClient.repoIndexingReattemptsTotal.inc();
                 if (attempts === maxAttempts) {
-                    this.logger.error(`Failed to sync repository ${repo.name} (id: ${repo.id}) after ${maxAttempts} attempts. Error: ${error}`);
+                    logger.error(`Failed to sync repository ${repo.name} (id: ${repo.id}) after ${maxAttempts} attempts. Error: ${error}`);
                     throw error;
                 }

                 const sleepDuration = 5000 * Math.pow(2, attempts - 1);
-                this.logger.error(`Failed to sync repository ${repo.name} (id: ${repo.id}), attempt ${attempts}/${maxAttempts}. Sleeping for ${sleepDuration / 1000}s... Error: ${error}`);
+                logger.error(`Failed to sync repository ${repo.name} (id: ${repo.id}), attempt ${attempts}/${maxAttempts}. Sleeping for ${sleepDuration / 1000}s... Error: ${error}`);
                 await new Promise(resolve => setTimeout(resolve, sleepDuration));
             }
         }
     }

     private async onIndexJobCompleted(job: Job<RepoIndexingPayload>) {
-        this.logger.info(`Repo index job for repo ${job.data.repo.displayName} (id: ${job.data.repo.id}, jobId: ${job.id}) completed`);
+        logger.info(`Repo index job for repo ${job.data.repo.displayName} (id: ${job.data.repo.id}, jobId: ${job.id}) completed`);
         this.promClient.activeRepoIndexingJobs.dec();
         this.promClient.repoIndexingSuccessTotal.inc();

@@ -356,7 +357,7 @@ export class RepoManager implements IRepoManager {
     }

     private async onIndexJobFailed(job: Job<RepoIndexingPayload> | undefined, err: unknown) {
-        this.logger.info(`Repo index job for repo ${job?.data.repo.displayName} (id: ${job?.data.repo.id}, jobId: ${job?.id}) failed with error: ${err}`);
+        logger.info(`Repo index job for repo ${job?.data.repo.displayName} (id: ${job?.data.repo.id}, jobId: ${job?.id}) failed with error: ${err}`);
         Sentry.captureException(err, {
             tags: {
                 repoId: job?.data.repo.id,

@@ -396,7 +397,7 @@ export class RepoManager implements IRepoManager {
             data: { repo },
         })));

-        this.logger.info(`Added ${repos.length} jobs to gcQueue`);
+        logger.info(`Added ${repos.length} jobs to gcQueue`);
     });
 }

@@ -425,7 +426,7 @@ export class RepoManager implements IRepoManager {
             },
         });
         if (reposWithNoConnections.length > 0) {
-            this.logger.info(`Garbage collecting ${reposWithNoConnections.length} repos with no connections: ${reposWithNoConnections.map(repo => repo.id).join(', ')}`);
+            logger.info(`Garbage collecting ${reposWithNoConnections.length} repos with no connections: ${reposWithNoConnections.map(repo => repo.id).join(', ')}`);
         }

         ////////////////////////////////////

@@ -448,7 +449,7 @@ export class RepoManager implements IRepoManager {
         });

         if (inactiveOrgRepos.length > 0) {
-            this.logger.info(`Garbage collecting ${inactiveOrgRepos.length} inactive org repos: ${inactiveOrgRepos.map(repo => repo.id).join(', ')}`);
+            logger.info(`Garbage collecting ${inactiveOrgRepos.length} inactive org repos: ${inactiveOrgRepos.map(repo => repo.id).join(', ')}`);
         }

         const reposToDelete = [...reposWithNoConnections, ...inactiveOrgRepos];

@@ -458,7 +459,7 @@ export class RepoManager implements IRepoManager {
     }

     private async runGarbageCollectionJob(job: Job<RepoGarbageCollectionPayload>) {
-        this.logger.info(`Running garbage collection job (id: ${job.id}) for repo ${job.data.repo.displayName} (id: ${job.data.repo.id})`);
+        logger.info(`Running garbage collection job (id: ${job.id}) for repo ${job.data.repo.displayName} (id: ${job.data.repo.id})`);
         this.promClient.activeRepoGarbageCollectionJobs.inc();

         const repo = job.data.repo as Repo;

@@ -474,7 +475,7 @@ export class RepoManager implements IRepoManager {
         // delete cloned repo
         const { path: repoPath, isReadOnly } = getRepoPath(repo, this.ctx);
         if (existsSync(repoPath) && !isReadOnly) {
-            this.logger.info(`Deleting repo directory ${repoPath}`);
+            logger.info(`Deleting repo directory ${repoPath}`);
             await promises.rm(repoPath, { recursive: true, force: true });
         }

@@ -483,13 +484,13 @@ export class RepoManager implements IRepoManager {
         const files = readdirSync(this.ctx.indexPath).filter(file => file.startsWith(shardPrefix));
         for (const file of files) {
             const filePath = `${this.ctx.indexPath}/${file}`;
-            this.logger.info(`Deleting shard file ${filePath}`);
+            logger.info(`Deleting shard file ${filePath}`);
             await promises.rm(filePath, { force: true });
         }
     }

     private async onGarbageCollectionJobCompleted(job: Job<RepoGarbageCollectionPayload>) {
-        this.logger.info(`Garbage collection job ${job.id} completed`);
+        logger.info(`Garbage collection job ${job.id} completed`);
         this.promClient.activeRepoGarbageCollectionJobs.dec();
         this.promClient.repoGarbageCollectionSuccessTotal.inc();

@@ -501,7 +502,7 @@ export class RepoManager implements IRepoManager {
     }

     private async onGarbageCollectionJobFailed(job: Job<RepoGarbageCollectionPayload> | undefined, err: unknown) {
-        this.logger.info(`Garbage collection job failed (id: ${job?.id ?? 'unknown'}) with error: ${err}`);
+        logger.info(`Garbage collection job failed (id: ${job?.id ?? 'unknown'}) with error: ${err}`);
         Sentry.captureException(err, {
             tags: {
                 repoId: job?.data.repo.id,

@@ -536,7 +537,7 @@ export class RepoManager implements IRepoManager {
         });

         if (repos.length > 0) {
-            this.logger.info(`Scheduling ${repos.length} repo timeouts`);
+            logger.info(`Scheduling ${repos.length} repo timeouts`);
             await this.scheduleRepoTimeoutsBulk(repos);
         }
     }

@@ -5,7 +5,7 @@ import { getRepoPath } from "./utils.js";
 import { getShardPrefix } from "./utils.js";
 import { getBranches, getTags } from "./git.js";
 import micromatch from "micromatch";
-import { createLogger } from "./logger.js";
+import { createLogger } from "@sourcebot/logger";
 import { captureEvent } from "./posthog.js";

 const logger = createLogger('zoekt');

@@ -25,6 +25,7 @@
   },
   "dependencies": {
     "@prisma/client": "6.2.1",
+    "@sourcebot/logger": "workspace:*",
     "@types/readline-sync": "^1.4.8",
     "readline-sync": "^1.4.10"
   }

@@ -2,6 +2,7 @@ import { PrismaClient } from "@sourcebot/db";
 import { ArgumentParser } from "argparse";
 import { migrateDuplicateConnections } from "./scripts/migrate-duplicate-connections";
 import { confirmAction } from "./utils";
+import { createLogger } from "@sourcebot/logger";

 export interface Script {
     run: (prisma: PrismaClient) => Promise<void>;

@@ -16,17 +17,19 @@ parser.add_argument("--url", { required: true, help: "Database URL" });
 parser.add_argument("--script", { required: true, help: "Script to run" });
 const args = parser.parse_args();

+const logger = createLogger('db-script-runner');
+
 (async () => {
     if (!(args.script in scripts)) {
-        console.log("Invalid script");
+        logger.error("Invalid script");
         process.exit(1);
     }

     const selectedScript = scripts[args.script];

-    console.log("\nTo confirm:");
-    console.log(`- Database URL: ${args.url}`);
-    console.log(`- Script: ${args.script}`);
+    logger.info("\nTo confirm:");
+    logger.info(`- Database URL: ${args.url}`);
+    logger.info(`- Script: ${args.script}`);

     confirmAction();

@ -36,7 +39,7 @@ const args = parser.parse_args();
|
||||||
|
|
||||||
await selectedScript.run(prisma);
|
await selectedScript.run(prisma);
|
||||||
|
|
||||||
console.log("\nDone.");
|
logger.info("\nDone.");
|
||||||
process.exit(0);
|
process.exit(0);
|
||||||
})();
|
})();
|
||||||
|
|
||||||
|
|
|
||||||
|
|

@@ -1,6 +1,9 @@
 import { Script } from "../scriptRunner";
 import { PrismaClient } from "../../dist";
 import { confirmAction } from "../utils";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('migrate-duplicate-connections');

 // Handles duplicate connections by renaming them to be unique.
 // @see: 20250320215449_unique_connection_name_constraint_within_org
@@ -15,7 +18,7 @@ export const migrateDuplicateConnections: Script = {
             },
         })).filter(({ _count }) => _count._all > 1);

-        console.log(`Found ${duplicates.reduce((acc, { _count }) => acc + _count._all, 0)} duplicate connections.`);
+        logger.info(`Found ${duplicates.reduce((acc, { _count }) => acc + _count._all, 0)} duplicate connections.`);

         confirmAction();

@@ -37,7 +40,7 @@ export const migrateDuplicateConnections: Script = {
                 const connection = connections[i];
                 const newName = `${name}-${i + 1}`;

-                console.log(`Migrating connection with id ${connection.id} from name=${name} to name=${newName}`);
+                logger.info(`Migrating connection with id ${connection.id} from name=${name} to name=${newName}`);

                 await prisma.connection.update({
                     where: { id: connection.id },
@@ -47,6 +50,6 @@ export const migrateDuplicateConnections: Script = {
             }
         }

-        console.log(`Migrated ${migrated} connections.`);
+        logger.info(`Migrated ${migrated} connections.`);
     },
 };

@@ -1,9 +1,17 @@
 import readline from 'readline-sync';
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('db-utils');

 export const confirmAction = (message: string = "Are you sure you want to proceed? [N/y]") => {
     const response = readline.question(message).toLowerCase();
     if (response !== 'y') {
-        console.log("Aborted.");
+        logger.info("Aborted.");
         process.exit(0);
     }
 }
+
+export const abort = () => {
+    logger.info("Aborted.");
+    process.exit(0);
+};
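
A usage sketch of the helpers above (the calling script is hypothetical; confirmAction and abort are the functions defined in this file):

    import { confirmAction, abort } from "../utils";

    confirmAction("This will rewrite rows. Proceed? [N/y]");
    // ...do the work; on any early-exit path, abort() logs "Aborted." and exits with code 0.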

packages/logger/.gitignore (new file, +2 lines, vendored)
@@ -0,0 +1,2 @@
+dist/
+*.tsbuildinfo

packages/logger/package.json (new file, +24 lines)
@@ -0,0 +1,24 @@
+{
+    "name": "@sourcebot/logger",
+    "version": "0.1.0",
+    "main": "dist/index.js",
+    "type": "module",
+    "private": true,
+    "scripts": {
+        "build": "tsc",
+        "postinstall": "yarn build"
+    },
+    "dependencies": {
+        "@logtail/node": "^0.5.2",
+        "@logtail/winston": "^0.5.2",
+        "@t3-oss/env-core": "^0.12.0",
+        "dotenv": "^16.4.5",
+        "triple-beam": "^1.4.1",
+        "winston": "^3.15.0",
+        "zod": "^3.24.3"
+    },
+    "devDependencies": {
+        "@types/node": "^22.7.5",
+        "typescript": "^5.7.3"
+    }
+}

packages/logger/src/env.ts (new file, +28 lines)
@@ -0,0 +1,28 @@
+import { createEnv } from "@t3-oss/env-core";
+import { z } from "zod";
+import dotenv from 'dotenv';
+
+// Booleans are specified as 'true' or 'false' strings.
+const booleanSchema = z.enum(["true", "false"]);
+
+dotenv.config({
+    path: './.env',
+});
+
+dotenv.config({
+    path: './.env.local',
+    override: true
+});
+
+export const env = createEnv({
+    server: {
+        SOURCEBOT_LOG_LEVEL: z.enum(["info", "debug", "warn", "error"]).default("info"),
+        SOURCEBOT_STRUCTURED_LOGGING_ENABLED: booleanSchema.default("false"),
+        SOURCEBOT_STRUCTURED_LOGGING_FILE: z.string().optional(),
+        LOGTAIL_TOKEN: z.string().optional(),
+        LOGTAIL_HOST: z.string().url().optional(),
+    },
+    runtimeEnv: process.env,
+    emptyStringAsUndefined: true,
+    skipValidation: process.env.SKIP_ENV_VALIDATION === "1",
+});
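
A minimal example configuration for the schema above, assuming you want JSON logs at debug level (the file path is illustrative, not a shipped default):

    SOURCEBOT_LOG_LEVEL=debug
    SOURCEBOT_STRUCTURED_LOGGING_ENABLED=true
    SOURCEBOT_STRUCTURED_LOGGING_FILE=/var/log/sourcebot/sourcebot.log

With SOURCEBOT_STRUCTURED_LOGGING_ENABLED left at its default of "false", only the human-readable console transport is active.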

packages/logger/src/index.ts (new file, +87 lines)
@@ -0,0 +1,87 @@
+import winston, { format } from 'winston';
+import { Logtail } from '@logtail/node';
+import { LogtailTransport } from '@logtail/winston';
+import { MESSAGE } from 'triple-beam';
+import { env } from './env.js';
+
+/**
+ * Logger configuration with support for structured JSON logging.
+ *
+ * When SOURCEBOT_STRUCTURED_LOGGING_ENABLED=true:
+ * - Console output will be in JSON format suitable for Datadog ingestion
+ * - Logs will include structured fields: timestamp, level, message, label, stack (if error)
+ *
+ * When SOURCEBOT_STRUCTURED_LOGGING_ENABLED=false (default):
+ * - Console output will be human-readable with colors
+ * - Logs will be formatted as: "timestamp level: [label] message"
+ */
+
+const { combine, colorize, timestamp, prettyPrint, errors, printf, label: labelFn, json } = format;
+
+const datadogFormat = format((info) => {
+    info.status = info.level.toLowerCase();
+    info.service = info.label;
+    info.label = undefined;
+
+    const msg = info[MESSAGE as unknown as string] as string | undefined;
+    if (msg) {
+        info.message = msg;
+        info[MESSAGE as unknown as string] = undefined;
+    }
+
+    return info;
+});
+
+const humanReadableFormat = printf(({ level, message, timestamp, stack, label: _label }) => {
+    const label = `[${_label}] `;
+    if (stack) {
+        return `${timestamp} ${level}: ${label}${message}\n${stack}`;
+    }
+    return `${timestamp} ${level}: ${label}${message}`;
+});
+
+const createLogger = (label: string) => {
+    const isStructuredLoggingEnabled = env.SOURCEBOT_STRUCTURED_LOGGING_ENABLED === 'true';
+
+    return winston.createLogger({
+        level: env.SOURCEBOT_LOG_LEVEL,
+        format: combine(
+            errors({ stack: true }),
+            timestamp(),
+            labelFn({ label: label })
+        ),
+        transports: [
+            new winston.transports.Console({
+                format: isStructuredLoggingEnabled
+                    ? combine(
+                        datadogFormat(),
+                        json()
+                    )
+                    : combine(
+                        colorize(),
+                        humanReadableFormat
+                    ),
+            }),
+            ...(env.SOURCEBOT_STRUCTURED_LOGGING_FILE && isStructuredLoggingEnabled ? [
+                new winston.transports.File({
+                    filename: env.SOURCEBOT_STRUCTURED_LOGGING_FILE,
+                    format: combine(
+                        datadogFormat(),
+                        json()
+                    ),
+                }),
+            ] : []),
+            ...(env.LOGTAIL_TOKEN && env.LOGTAIL_HOST ? [
+                new LogtailTransport(
+                    new Logtail(env.LOGTAIL_TOKEN, {
+                        endpoint: env.LOGTAIL_HOST,
+                    })
+                )
+            ] : []),
+        ]
+    });
+}
+
+export {
+    createLogger
+};
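
For reference, a minimal consumer of the package above (the 'my-service' label and the messages are illustrative; this mirrors the pattern applied throughout the rest of this diff):

    import { createLogger } from "@sourcebot/logger";

    const logger = createLogger('my-service');

    // Human-readable mode (default): "<ISO timestamp> info: [my-service] service started"
    logger.info('service started');

    // errors({ stack: true }) serializes Error objects, so the stack trace is appended
    // in human-readable mode and emitted as a `stack` field in JSON mode.
    logger.error(new Error('connection refused'));

When SOURCEBOT_STRUCTURED_LOGGING_ENABLED=true, the same calls emit one JSON object per line, with datadogFormat mapping the level to a `status` field and the label to `service`.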

packages/logger/tsconfig.json (new file, +23 lines)
@@ -0,0 +1,23 @@
+{
+    "compilerOptions": {
+        "target": "ES2022",
+        "module": "Node16",
+        "moduleResolution": "Node16",
+        "lib": ["ES2023"],
+        "outDir": "dist",
+        "rootDir": "src",
+        "declaration": true,
+        "declarationMap": true,
+        "sourceMap": true,
+        "strict": true,
+        "noImplicitAny": true,
+        "strictNullChecks": true,
+        "esModuleInterop": true,
+        "forceConsistentCasingInFileNames": true,
+        "skipLibCheck": true,
+        "isolatedModules": true,
+        "resolveJsonModule": true
+    },
+    "include": ["src/**/*"],
+    "exclude": ["node_modules", "dist"]
+}

@@ -4,7 +4,7 @@ import { FileSourceRequest, FileSourceResponse, ListRepositoriesResponse, Search
 import { isServiceError } from './utils.js';

 export const search = async (request: SearchRequest): Promise<SearchResponse | ServiceError> => {
-    console.error(`Executing search request: ${JSON.stringify(request, null, 2)}`);
+    console.debug(`Executing search request: ${JSON.stringify(request, null, 2)}`);
     const result = await fetch(`${env.SOURCEBOT_HOST}/api/search`, {
         method: 'POST',
         headers: {

@@ -75,7 +75,7 @@ server.tool(
         query += ` case:no`;
     }

-    console.error(`Executing search request: ${query}`);
+    console.debug(`Executing search request: ${query}`);

     const response = await search({
         query,
@@ -215,7 +215,7 @@ server.tool(
 const runServer = async () => {
     const transport = new StdioServerTransport();
     await server.connect(transport);
-    console.error('Sourcebot MCP server ready');
+    console.info('Sourcebot MCP server ready');
 }

 runServer().catch((error) => {

@@ -74,6 +74,7 @@
         "@sourcebot/crypto": "workspace:*",
         "@sourcebot/db": "workspace:*",
         "@sourcebot/error": "workspace:*",
+        "@sourcebot/logger": "workspace:*",
         "@sourcebot/schemas": "workspace:*",
         "@ssddanbrown/codemirror-lang-twig": "^1.0.0",
         "@stripe/react-stripe-js": "^3.1.1",

@@ -3,6 +3,9 @@
 // https://docs.sentry.io/platforms/javascript/guides/nextjs/

 import * as Sentry from "@sentry/nextjs";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('sentry-server-config');

 if (!!process.env.NEXT_PUBLIC_SENTRY_WEBAPP_DSN && !!process.env.NEXT_PUBLIC_SENTRY_ENVIRONMENT) {
     Sentry.init({
@@ -13,5 +16,5 @@ if (!!process.env.NEXT_PUBLIC_SENTRY_WEBAPP_DSN && !!process.env.NEXT_PUBLIC_SEN
         debug: false,
     });
 } else {
-    console.debug("[server] Sentry was not initialized");
+    logger.debug("[server] Sentry was not initialized");
 }

@@ -33,11 +33,14 @@ import { hasEntitlement } from "./features/entitlements/server";
 import { getPublicAccessStatus } from "./ee/features/publicAccess/publicAccess";
 import JoinRequestSubmittedEmail from "./emails/joinRequestSubmittedEmail";
 import JoinRequestApprovedEmail from "./emails/joinRequestApprovedEmail";
+import { createLogger } from "@sourcebot/logger";

 const ajv = new Ajv({
     validateFormats: false,
 });

+const logger = createLogger('web-actions');
+
 /**
  * "Service Error Wrapper".
  *
@@ -49,7 +52,7 @@ export const sew = async <T>(fn: () => Promise<T>): Promise<T | ServiceError> =>
         return await fn();
     } catch (e) {
         Sentry.captureException(e);
-        console.error(e);
+        logger.error(e);
         return unexpectedError(`An unexpected error occurred. Please try again later.`);
     }
 }
@@ -64,7 +67,7 @@ export const withAuth = async <T>(fn: (userId: string) => Promise<T>, allowSingl
     if (apiKey) {
         const apiKeyOrError = await verifyApiKey(apiKey);
         if (isServiceError(apiKeyOrError)) {
-            console.error(`Invalid API key: ${JSON.stringify(apiKey)}. Error: ${JSON.stringify(apiKeyOrError)}`);
+            logger.error(`Invalid API key: ${JSON.stringify(apiKey)}. Error: ${JSON.stringify(apiKeyOrError)}`);
             return notAuthenticated();
         }

@@ -75,7 +78,7 @@ export const withAuth = async <T>(fn: (userId: string) => Promise<T>, allowSingl
         });

         if (!user) {
-            console.error(`No user found for API key: ${apiKey}`);
+            logger.error(`No user found for API key: ${apiKey}`);
             return notAuthenticated();
         }

@@ -97,7 +100,7 @@ export const withAuth = async <T>(fn: (userId: string) => Promise<T>, allowSingl
     ) {
         if (!hasEntitlement("public-access")) {
             const plan = getPlan();
-            console.error(`Public access isn't supported in your current plan: ${plan}. If you have a valid enterprise license key, pass it via SOURCEBOT_EE_LICENSE_KEY. For support, contact ${SOURCEBOT_SUPPORT_EMAIL}.`);
+            logger.error(`Public access isn't supported in your current plan: ${plan}. If you have a valid enterprise license key, pass it via SOURCEBOT_EE_LICENSE_KEY. For support, contact ${SOURCEBOT_SUPPORT_EMAIL}.`);
             return notAuthenticated();
         }

@@ -1011,11 +1014,11 @@ export const createInvites = async (emails: string[], domain: string): Promise<{

             const failed = result.rejected.concat(result.pending).filter(Boolean);
             if (failed.length > 0) {
-                console.error(`Failed to send invite email to ${email}: ${failed}`);
+                logger.error(`Failed to send invite email to ${email}: ${failed}`);
             }
         }));
     } else {
-        console.warn(`SMTP_CONNECTION_URL or EMAIL_FROM_ADDRESS not set. Skipping invite email to ${emails.join(", ")}`);
+        logger.warn(`SMTP_CONNECTION_URL or EMAIL_FROM_ADDRESS not set. Skipping invite email to ${emails.join(", ")}`);
     }

     return {
@@ -1457,7 +1460,7 @@ export const createAccountRequest = async (userId: string, domain: string) => se
     }

     if (user.pendingApproval == false) {
-        console.warn(`User ${userId} isn't pending approval. Skipping account request creation.`);
+        logger.warn(`User ${userId} isn't pending approval. Skipping account request creation.`);
         return {
             success: true,
             existingRequest: false,
@@ -1484,7 +1487,7 @@ export const createAccountRequest = async (userId: string, domain: string) => se
     });

     if (existingRequest) {
-        console.warn(`User ${userId} already has an account request for org ${org.id}. Skipping account request creation.`);
+        logger.warn(`User ${userId} already has an account request for org ${org.id}. Skipping account request creation.`);
         return {
             success: true,
             existingRequest: true,
@@ -1516,7 +1519,7 @@ export const createAccountRequest = async (userId: string, domain: string) => se
     });

     if (!owner) {
-        console.error(`Failed to find owner for org ${org.id} when drafting email for account request from ${userId}`);
+        logger.error(`Failed to find owner for org ${org.id} when drafting email for account request from ${userId}`);
     } else {
         const html = await render(JoinRequestSubmittedEmail({
             baseUrl: deploymentUrl,
@@ -1541,11 +1544,11 @@ export const createAccountRequest = async (userId: string, domain: string) => se

             const failed = result.rejected.concat(result.pending).filter(Boolean);
             if (failed.length > 0) {
-                console.error(`Failed to send account request email to ${owner.email}: ${failed}`);
+                logger.error(`Failed to send account request email to ${owner.email}: ${failed}`);
             }
         }
     } else {
-        console.warn(`SMTP_CONNECTION_URL or EMAIL_FROM_ADDRESS not set. Skipping account request email to owner`);
+        logger.warn(`SMTP_CONNECTION_URL or EMAIL_FROM_ADDRESS not set. Skipping account request email to owner`);
     }
 }

@@ -1612,7 +1615,7 @@ export const approveAccountRequest = async (requestId: string, domain: string) =
     })

     for (const invite of invites) {
-        console.log(`Account request approved. Deleting invite ${invite.id} for ${request.requestedBy.email}`);
+        logger.info(`Account request approved. Deleting invite ${invite.id} for ${request.requestedBy.email}`);
         await tx.invite.delete({
             where: {
                 id: invite.id,
@@ -1651,10 +1654,10 @@ export const approveAccountRequest = async (requestId: string, domain: string) =

         const failed = result.rejected.concat(result.pending).filter(Boolean);
         if (failed.length > 0) {
-            console.error(`Failed to send approval email to ${request.requestedBy.email}: ${failed}`);
+            logger.error(`Failed to send approval email to ${request.requestedBy.email}: ${failed}`);
         }
     } else {
-        console.warn(`SMTP_CONNECTION_URL or EMAIL_FROM_ADDRESS not set. Skipping approval email to ${request.requestedBy.email}`);
+        logger.warn(`SMTP_CONNECTION_URL or EMAIL_FROM_ADDRESS not set. Skipping approval email to ${request.requestedBy.email}`);
     }

     return {

@@ -1,7 +1,11 @@
 'use server';

-export const GET = async () => {
-    console.log('health check');
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('health-check');
+
+export async function GET() {
+    logger.info('health check');
     return Response.json({ status: 'ok' });
 }

@@ -5,6 +5,9 @@ import { prisma } from '@/prisma';
 import { ConnectionSyncStatus, StripeSubscriptionStatus } from '@sourcebot/db';
 import { stripeClient } from '@/ee/features/billing/stripe';
 import { env } from '@/env.mjs';
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('stripe-webhook');

 export async function POST(req: NextRequest) {
     const body = await req.text();
@@ -52,7 +55,7 @@ export async function POST(req: NextRequest) {
                 stripeLastUpdatedAt: new Date()
             }
         });
-        console.log(`Org ${org.id} subscription status updated to INACTIVE`);
+        logger.info(`Org ${org.id} subscription status updated to INACTIVE`);

         return new Response(JSON.stringify({ received: true }), {
             status: 200
@@ -80,7 +83,7 @@ export async function POST(req: NextRequest) {
                 stripeLastUpdatedAt: new Date()
             }
         });
-        console.log(`Org ${org.id} subscription status updated to ACTIVE`);
+        logger.info(`Org ${org.id} subscription status updated to ACTIVE`);

         // mark all of this org's connections for sync, since their repos may have been previously garbage collected
         await prisma.connection.updateMany({
@@ -96,14 +99,14 @@ export async function POST(req: NextRequest) {
                 status: 200
             });
         } else {
-            console.log(`Received unknown event type: ${event.type}`);
+            logger.info(`Received unknown event type: ${event.type}`);
             return new Response(JSON.stringify({ received: true }), {
                 status: 202
             });
         }

     } catch (err) {
-        console.error('Error processing webhook:', err);
+        logger.error('Error processing webhook:', err);
         return new Response(
             'Webhook error: ' + (err as Error).message,
             { status: 400 }

@@ -9,6 +9,9 @@ import { processGitHubPullRequest } from "@/features/agents/review-agent/app";
 import { throttling } from "@octokit/plugin-throttling";
 import fs from "fs";
 import { GitHubPullRequest } from "@/features/agents/review-agent/types";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('github-webhook');

 let githubApp: App | undefined;
 if (env.GITHUB_APP_ID && env.GITHUB_APP_WEBHOOK_SECRET && env.GITHUB_APP_PRIVATE_KEY_PATH) {
@@ -26,7 +29,7 @@ if (env.GITHUB_APP_ID && env.GITHUB_APP_WEBHOOK_SECRET && env.GITHUB_APP_PRIVATE
             throttle: {
                 onRateLimit: (retryAfter: number, options: Required<EndpointDefaults>, octokit: Octokit, retryCount: number) => {
                     if (retryCount > 3) {
-                        console.log(`Rate limit exceeded: ${retryAfter} seconds`);
+                        logger.warn(`Rate limit exceeded: ${retryAfter} seconds`);
                         return false;
                     }

@@ -35,7 +38,7 @@ if (env.GITHUB_APP_ID && env.GITHUB_APP_WEBHOOK_SECRET && env.GITHUB_APP_PRIVATE
             }
         });
     } catch (error) {
-        console.error(`Error initializing GitHub app: ${error}`);
+        logger.error(`Error initializing GitHub app: ${error}`);
     }
 }

@@ -53,21 +56,21 @@ export const POST = async (request: NextRequest) => {

     const githubEvent = headers['x-github-event'] || headers['X-GitHub-Event'];
     if (githubEvent) {
-        console.log('GitHub event received:', githubEvent);
+        logger.info('GitHub event received:', githubEvent);

         if (!githubApp) {
-            console.warn('Received GitHub webhook event but GitHub app env vars are not set');
+            logger.warn('Received GitHub webhook event but GitHub app env vars are not set');
             return Response.json({ status: 'ok' });
         }

         if (isPullRequestEvent(githubEvent, body)) {
             if (env.REVIEW_AGENT_AUTO_REVIEW_ENABLED === "false") {
-                console.log('Review agent auto review (REVIEW_AGENT_AUTO_REVIEW_ENABLED) is disabled, skipping');
+                logger.info('Review agent auto review (REVIEW_AGENT_AUTO_REVIEW_ENABLED) is disabled, skipping');
                 return Response.json({ status: 'ok' });
             }

             if (!body.installation) {
-                console.error('Received github pull request event but installation is not present');
+                logger.error('Received github pull request event but installation is not present');
                 return Response.json({ status: 'ok' });
             }

@@ -81,15 +84,15 @@ export const POST = async (request: NextRequest) => {
         if (isIssueCommentEvent(githubEvent, body)) {
             const comment = body.comment.body;
             if (!comment) {
-                console.warn('Received issue comment event but comment body is empty');
+                logger.warn('Received issue comment event but comment body is empty');
                 return Response.json({ status: 'ok' });
             }

             if (comment === `/${env.REVIEW_AGENT_REVIEW_COMMAND}`) {
-                console.log('Review agent review command received, processing');
+                logger.info('Review agent review command received, processing');

                 if (!body.installation) {
-                    console.error('Received github issue comment event but installation is not present');
+                    logger.error('Received github issue comment event but installation is not present');
                     return Response.json({ status: 'ok' });
                 }


@@ -3,6 +3,9 @@ import { LoginForm } from "./components/loginForm";
 import { redirect } from "next/navigation";
 import { getProviders } from "@/auth";
 import { Footer } from "@/app/components/footer";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('login-page');

 interface LoginProps {
     searchParams: {
@@ -12,10 +15,10 @@ interface LoginProps {
 }

 export default async function Login({ searchParams }: LoginProps) {
-    console.log("Login page loaded");
+    logger.info("Login page loaded");
     const session = await auth();
     if (session) {
-        console.log("Session found in login page, redirecting to home");
+        logger.info("Session found in login page, redirecting to home");
         return redirect("/");
     }


@@ -19,6 +19,7 @@ import { getSSOProviders, handleJITProvisioning } from '@/ee/sso/sso';
 import { hasEntitlement } from '@/features/entitlements/server';
 import { isServiceError } from './lib/utils';
 import { ServiceErrorException } from './lib/serviceError';
+import { createLogger } from "@sourcebot/logger";

 export const runtime = 'nodejs';

@@ -36,6 +37,8 @@ declare module 'next-auth/jwt' {
     }
 }

+const logger = createLogger('web-auth');
+
 export const getProviders = () => {
     const providers: Provider[] = [];

@@ -202,13 +205,13 @@ const onCreateUser = async ({ user }: { user: AuthJsUser }) => {
     if (env.AUTH_EE_ENABLE_JIT_PROVISIONING === 'true' && hasEntitlement("sso")) {
         const res = await handleJITProvisioning(user.id!, SINGLE_TENANT_ORG_DOMAIN);
         if (isServiceError(res)) {
-            console.error(`Failed to provision user ${user.id} for org ${SINGLE_TENANT_ORG_DOMAIN}: ${res.message}`);
+            logger.error(`Failed to provision user ${user.id} for org ${SINGLE_TENANT_ORG_DOMAIN}: ${res.message}`);
             throw new ServiceErrorException(res);
         }
     } else {
         const res = await createAccountRequest(user.id!, SINGLE_TENANT_ORG_DOMAIN);
         if (isServiceError(res)) {
-            console.error(`Failed to provision user ${user.id} for org ${SINGLE_TENANT_ORG_DOMAIN}: ${res.message}`);
+            logger.error(`Failed to provision user ${user.id} for org ${SINGLE_TENANT_ORG_DOMAIN}: ${res.message}`);
             throw new ServiceErrorException(res);
         }
     }

@@ -12,6 +12,9 @@ import { StatusCodes } from "http-status-codes";
 import { ErrorCode } from "@/lib/errorCodes";
 import { headers } from "next/headers";
 import { getSubscriptionForOrg } from "./serverUtils";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('billing-actions');

 export const createOnboardingSubscription = async (domain: string) => sew(() =>
     withAuth(async (userId) =>
@@ -98,7 +101,7 @@ export const createOnboardingSubscription = async (domain: string) => sew(() =>
             subscriptionId: subscription.id,
         }
     } catch (e) {
-        console.error(e);
+        logger.error(e);
         return {
             statusCode: StatusCodes.INTERNAL_SERVER_ERROR,
             errorCode: ErrorCode.STRIPE_CHECKOUT_ERROR,

@@ -4,6 +4,9 @@ import { SINGLE_TENANT_ORG_ID, SOURCEBOT_SUPPORT_EMAIL } from "@/lib/constants";
 import { prisma } from "@/prisma";
 import { SearchContext } from "@sourcebot/schemas/v3/index.type";
 import micromatch from "micromatch";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('sync-search-contexts');

 export const syncSearchContexts = async (contexts?: { [key: string]: SearchContext }) => {
     if (env.SOURCEBOT_TENANCY_MODE !== 'single') {
@@ -13,7 +16,7 @@ export const syncSearchContexts = async (contexts?: { [key: string]: SearchConte
     if (!hasEntitlement("search-contexts")) {
         if (contexts) {
             const plan = getPlan();
-            console.error(`Search contexts are not supported in your current plan: ${plan}. If you have a valid enterprise license key, pass it via SOURCEBOT_EE_LICENSE_KEY. For support, contact ${SOURCEBOT_SUPPORT_EMAIL}.`);
+            logger.error(`Search contexts are not supported in your current plan: ${plan}. If you have a valid enterprise license key, pass it via SOURCEBOT_EE_LICENSE_KEY. For support, contact ${SOURCEBOT_SUPPORT_EMAIL}.`);
         }
         return;
     }
@@ -101,7 +104,7 @@ export const syncSearchContexts = async (contexts?: { [key: string]: SearchConte
     });

     for (const context of deletedContexts) {
-        console.log(`Deleting search context with name '${context.name}'. ID: ${context.id}`);
+        logger.info(`Deleting search context with name '${context.name}'. ID: ${context.id}`);
         await prisma.searchContext.delete({
             where: {
                 id: context.id,

@@ -6,6 +6,7 @@ import { env } from "@/env.mjs";
 import { GitHubPullRequest } from "@/features/agents/review-agent/types";
 import path from "path";
 import fs from "fs";
+import { createLogger } from "@sourcebot/logger";

 const rules = [
     "Do NOT provide general feedback, summaries, explanations of changes, or praises for making good additions.",
@@ -17,11 +18,13 @@ const rules = [
     "If there are no issues found on a line range, do NOT respond with any comments. This includes comments such as \"No issues found\" or \"LGTM\"."
 ]

+const logger = createLogger('review-agent');
+
 export async function processGitHubPullRequest(octokit: Octokit, pullRequest: GitHubPullRequest) {
-    console.log(`Received a pull request event for #${pullRequest.number}`);
+    logger.info(`Received a pull request event for #${pullRequest.number}`);

     if (!env.OPENAI_API_KEY) {
-        console.error("OPENAI_API_KEY is not set, skipping review agent");
+        logger.error("OPENAI_API_KEY is not set, skipping review agent");
         return;
     }

@@ -42,7 +45,7 @@ export async function processGitHubPullRequest(octokit: Octokit, pullRequest: Gi
             hour12: false
         }).replace(/(\d+)\/(\d+)\/(\d+), (\d+):(\d+):(\d+)/, '$3_$1_$2_$4_$5_$6');
         reviewAgentLogPath = path.join(reviewAgentLogDir, `review-agent-${pullRequest.number}-${timestamp}.log`);
-        console.log(`Review agent logging to ${reviewAgentLogPath}`);
+        logger.info(`Review agent logging to ${reviewAgentLogPath}`);
     }

     const prPayload = await githubPrParser(octokit, pullRequest);

@@ -4,17 +4,19 @@ import { fileSourceResponseSchema } from "@/features/search/schemas";
 import { base64Decode } from "@/lib/utils";
 import { isServiceError } from "@/lib/utils";
 import { env } from "@/env.mjs";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('fetch-file-content');

 export const fetchFileContent = async (pr_payload: sourcebot_pr_payload, filename: string): Promise<sourcebot_context> => {
-    console.log("Executing fetch_file_content");
+    logger.debug("Executing fetch_file_content");

     const repoPath = pr_payload.hostDomain + "/" + pr_payload.owner + "/" + pr_payload.repo;
     const fileSourceRequest = {
         fileName: filename,
         repository: repoPath,
     }
-    console.log(JSON.stringify(fileSourceRequest, null, 2));
+    logger.debug(JSON.stringify(fileSourceRequest, null, 2));

     const response = await getFileSource(fileSourceRequest, "~", env.REVIEW_AGENT_API_KEY);
     if (isServiceError(response)) {
@@ -30,6 +32,6 @@ export const fetchFileContent = async (pr_payload: sourcebot_pr_payload, filenam
         context: fileContent,
     }

-    console.log("Completed fetch_file_content");
+    logger.debug("Completed fetch_file_content");
     return fileContentContext;
 }

@@ -1,8 +1,11 @@
 import { sourcebot_diff, sourcebot_context, sourcebot_file_diff_review_schema } from "@/features/agents/review-agent/types";
 import { zodToJsonSchema } from "zod-to-json-schema";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('generate-diff-review-prompt');

 export const generateDiffReviewPrompt = async (diff: sourcebot_diff, context: sourcebot_context[], rules: string[]) => {
-    console.log("Executing generate_diff_review_prompt");
+    logger.debug("Executing generate_diff_review_prompt");

     const prompt = `
         You are an expert software engineer that excels at reviewing code changes. Given the input, additional context, and rules defined below, review the code changes and provide a detailed review. The review you provide
@@ -39,6 +42,6 @@ export const generateDiffReviewPrompt = async (diff: sourcebot_diff, context: so
     ${JSON.stringify(zodToJsonSchema(sourcebot_file_diff_review_schema), null, 2)}
     `;

-    console.log("Completed generate_diff_review_prompt");
+    logger.debug("Completed generate_diff_review_prompt");
     return prompt;
 }

@@ -2,9 +2,12 @@ import { sourcebot_pr_payload, sourcebot_diff_review, sourcebot_file_diff_review
 import { generateDiffReviewPrompt } from "@/features/agents/review-agent/nodes/generateDiffReviewPrompt";
 import { invokeDiffReviewLlm } from "@/features/agents/review-agent/nodes/invokeDiffReviewLlm";
 import { fetchFileContent } from "@/features/agents/review-agent/nodes/fetchFileContent";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('generate-pr-review');

 export const generatePrReviews = async (reviewAgentLogPath: string | undefined, pr_payload: sourcebot_pr_payload, rules: string[]): Promise<sourcebot_file_diff_review[]> => {
-    console.log("Executing generate_pr_reviews");
+    logger.debug("Executing generate_pr_reviews");

     const file_diff_reviews: sourcebot_file_diff_review[] = [];
     for (const file_diff of pr_payload.file_diffs) {
@@ -32,7 +35,7 @@ export const generatePrReviews = async (reviewAgentLogPath: string | undefined,
                 const diffReview = await invokeDiffReviewLlm(reviewAgentLogPath, prompt);
                 reviews.push(...diffReview.reviews);
             } catch (error) {
-                console.error(`Error generating review for ${file_diff.to}: ${error}`);
+                logger.error(`Error generating review for ${file_diff.to}: ${error}`);
             }
         }

@@ -44,6 +47,6 @@ export const generatePrReviews = async (reviewAgentLogPath: string | undefined,
         }
     }

-    console.log("Completed generate_pr_reviews");
+    logger.debug("Completed generate_pr_reviews");
     return file_diff_reviews;
 }

@@ -2,22 +2,25 @@ import { sourcebot_pr_payload, sourcebot_file_diff, sourcebot_diff } from "@/fea
 import parse from "parse-diff";
 import { Octokit } from "octokit";
 import { GitHubPullRequest } from "@/features/agents/review-agent/types";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('github-pr-parser');

 export const githubPrParser = async (octokit: Octokit, pullRequest: GitHubPullRequest): Promise<sourcebot_pr_payload> => {
-    console.log("Executing github_pr_parser");
+    logger.debug("Executing github_pr_parser");

     let parsedDiff: parse.File[] = [];
     try {
         const diff = await octokit.request(pullRequest.diff_url);
         parsedDiff = parse(diff.data);
     } catch (error) {
-        console.error("Error fetching diff: ", error);
+        logger.error("Error fetching diff: ", error);
         throw error;
     }

     const sourcebotFileDiffs: (sourcebot_file_diff | null)[] = parsedDiff.map((file) => {
         if (!file.from || !file.to) {
-            console.log(`Skipping file due to missing from (${file.from}) or to (${file.to})`)
+            logger.debug(`Skipping file due to missing from (${file.from}) or to (${file.to})`)
             return null;
         }

@@ -50,7 +53,7 @@ export const githubPrParser = async (octokit: Octokit, pullRequest: GitHubPullRe
     });
     const filteredSourcebotFileDiffs: sourcebot_file_diff[] = sourcebotFileDiffs.filter((file) => file !== null) as sourcebot_file_diff[];

-    console.log("Completed github_pr_parser");
+    logger.debug("Completed github_pr_parser");
     return {
         title: pullRequest.title,
         description: pullRequest.body ?? "",

@@ -1,8 +1,11 @@
 import { Octokit } from "octokit";
 import { sourcebot_pr_payload, sourcebot_file_diff_review } from "@/features/agents/review-agent/types";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('github-push-pr-reviews');

 export const githubPushPrReviews = async (octokit: Octokit, pr_payload: sourcebot_pr_payload, file_diff_reviews: sourcebot_file_diff_review[]) => {
-    console.log("Executing github_push_pr_reviews");
+    logger.info("Executing github_push_pr_reviews");

     try {
         for (const file_diff_review of file_diff_reviews) {
@@ -25,13 +28,13 @@ export const githubPushPrReviews = async (octokit: Octokit, pr_payload: sourcebo
                     }),
                 });
             } catch (error) {
-                console.error(`Error pushing pr reviews for ${file_diff_review.filename}: ${error}`);
+                logger.error(`Error pushing pr reviews for ${file_diff_review.filename}: ${error}`);
             }
         }
     } catch (error) {
-        console.error(`Error pushing pr reviews: ${error}`);
+        logger.error(`Error pushing pr reviews: ${error}`);
     }

-    console.log("Completed github_push_pr_reviews");
+    logger.info("Completed github_push_pr_reviews");
 }

@@ -2,12 +2,15 @@ import OpenAI from "openai";
 import { sourcebot_file_diff_review, sourcebot_file_diff_review_schema } from "@/features/agents/review-agent/types";
 import { env } from "@/env.mjs";
 import fs from "fs";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('invoke-diff-review-llm');

 export const invokeDiffReviewLlm = async (reviewAgentLogPath: string | undefined, prompt: string): Promise<sourcebot_file_diff_review> => {
-    console.log("Executing invoke_diff_review_llm");
+    logger.debug("Executing invoke_diff_review_llm");

     if (!env.OPENAI_API_KEY) {
-        console.error("OPENAI_API_KEY is not set, skipping review agent");
+        logger.error("OPENAI_API_KEY is not set, skipping review agent");
         throw new Error("OPENAI_API_KEY is not set, skipping review agent");
     }

@@ -39,10 +42,10 @@ export const invokeDiffReviewLlm = async (reviewAgentLogPath: string | undefined
             throw new Error(`Invalid diff review format: ${diffReview.error}`);
         }

-        console.log("Completed invoke_diff_review_llm");
+        logger.debug("Completed invoke_diff_review_llm");
         return diffReview.data;
     } catch (error) {
-        console.error('Error calling OpenAI:', error);
+        logger.error('Error calling OpenAI:', error);
         throw error;
     }
 }

@@ -3,6 +3,9 @@ import { Entitlement, entitlementsByPlan, Plan } from "./constants"
 import { base64Decode } from "@/lib/utils";
 import { z } from "zod";
 import { SOURCEBOT_SUPPORT_EMAIL } from "@/lib/constants";
+import { createLogger } from "@sourcebot/logger";
+
+const logger = createLogger('entitlements');

 const eeLicenseKeyPrefix = "sourcebot_ee_";
 export const SOURCEBOT_UNLIMITED_SEATS = -1;
@@ -22,7 +25,7 @@ const decodeLicenseKeyPayload = (payload: string): LicenseKeyPayload => {
         const payloadJson = JSON.parse(decodedPayload);
         return eeLicenseKeyPayloadSchema.parse(payloadJson);
     } catch (error) {
-        console.error(`Failed to decode license key payload: ${error}`);
+        logger.error(`Failed to decode license key payload: ${error}`);
         process.exit(1);
     }
 }
@@ -49,12 +52,13 @@ export const getPlan = (): Plan => {
     if (licenseKey) {
         const expiryDate = new Date(licenseKey.expiryDate);
         if (expiryDate.getTime() < new Date().getTime()) {
-            console.error(`The provided license key has expired (${expiryDate.toLocaleString()}). Falling back to oss plan. Please contact ${SOURCEBOT_SUPPORT_EMAIL} for support.`);
+            logger.error(`The provided license key has expired (${expiryDate.toLocaleString()}). Falling back to oss plan. Please contact ${SOURCEBOT_SUPPORT_EMAIL} for support.`);
             process.exit(1);
         }

         return licenseKey.seats === SOURCEBOT_UNLIMITED_SEATS ? "self-hosted:enterprise-unlimited" : "self-hosted:enterprise";
     } else {
+        logger.info(`No valid license key found. Falling back to oss plan.`);
         return "oss";
     }
 }
@ -15,6 +15,9 @@ import { createGuestUser, setPublicAccessStatus } from '@/ee/features/publicAcce
|
||||||
import { isServiceError } from './lib/utils';
|
import { isServiceError } from './lib/utils';
|
||||||
import { ServiceErrorException } from './lib/serviceError';
|
import { ServiceErrorException } from './lib/serviceError';
|
||||||
import { SOURCEBOT_SUPPORT_EMAIL } from "@/lib/constants";
|
import { SOURCEBOT_SUPPORT_EMAIL } from "@/lib/constants";
|
||||||
|
import { createLogger } from "@sourcebot/logger";
|
||||||
|
|
||||||
|
const logger = createLogger('web-initialize');
|
||||||
|
|
||||||
const ajv = new Ajv({
|
const ajv = new Ajv({
|
||||||
validateFormats: false,
|
validateFormats: false,
|
||||||
|
|
@ -73,7 +76,7 @@ const syncConnections = async (connections?: { [key: string]: ConnectionConfig }
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
console.log(`Upserted connection with name '${key}'. Connection ID: ${connectionDb.id}`);
|
logger.info(`Upserted connection with name '${key}'. Connection ID: ${connectionDb.id}`);
|
||||||
|
|
||||||
// Re-try any repos that failed to index.
|
// Re-try any repos that failed to index.
|
||||||
const failedRepos = currentConnection?.repos.filter(repo => repo.repo.repoIndexingStatus === RepoIndexingStatus.FAILED).map(repo => repo.repo.id) ?? [];
|
const failedRepos = currentConnection?.repos.filter(repo => repo.repo.repoIndexingStatus === RepoIndexingStatus.FAILED).map(repo => repo.repo.id) ?? [];
|
||||||
|
|
@@ -104,7 +107,7 @@ const syncConnections = async (connections?: { [key: string]: ConnectionConfig }
     });

     for (const connection of deletedConnections) {
-        console.log(`Deleting connection with name '${connection.name}'. Connection ID: ${connection.id}`);
+        logger.info(`Deleting connection with name '${connection.name}'. Connection ID: ${connection.id}`);
         await prisma.connection.delete({
             where: {
                 id: connection.id,

@@ -142,12 +145,12 @@ const syncDeclarativeConfig = async (configPath: string) => {
     const hasPublicAccessEntitlement = hasEntitlement("public-access");
     const enablePublicAccess = config.settings?.enablePublicAccess;
     if (enablePublicAccess !== undefined && !hasPublicAccessEntitlement) {
-        console.error(`Public access flag is set in the config file but your license doesn't have public access entitlement. Please contact ${SOURCEBOT_SUPPORT_EMAIL} to request a license upgrade.`);
+        logger.error(`Public access flag is set in the config file but your license doesn't have public access entitlement. Please contact ${SOURCEBOT_SUPPORT_EMAIL} to request a license upgrade.`);
         process.exit(1);
     }

     if (hasPublicAccessEntitlement) {
-        console.log(`Setting public access status to ${!!enablePublicAccess} for org ${SINGLE_TENANT_ORG_DOMAIN}`);
+        logger.info(`Setting public access status to ${!!enablePublicAccess} for org ${SINGLE_TENANT_ORG_DOMAIN}`);
         const res = await setPublicAccessStatus(SINGLE_TENANT_ORG_DOMAIN, !!enablePublicAccess);
         if (isServiceError(res)) {
             throw new ServiceErrorException(res);

@@ -179,7 +182,7 @@ const pruneOldGuestUser = async () => {
             },
         });

-        console.log(`Deleted old guest user ${guestUser.userId}`);
+        logger.info(`Deleted old guest user ${guestUser.userId}`);
     }
 }

@@ -227,7 +230,7 @@ const initSingleTenancy = async () => {
         // watch for changes assuming it is a local file
         if (!isRemotePath(configPath)) {
             watch(configPath, () => {
-                console.log(`Config file ${configPath} changed. Re-syncing...`);
+                logger.info(`Config file ${configPath} changed. Re-syncing...`);
                 syncDeclarativeConfig(configPath);
             });
         }

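For context on the hunk above: `watch` is presumably node:fs.watch on a local config file. A self-contained sketch of that flow, where the isRemotePath implementation is an assumption (the real one is not shown in this diff):

import { watch } from "node:fs";

// Assumption: remote configs (http/https URLs) cannot be watched.
const isRemotePath = (p: string) => /^https?:\/\//.test(p);

const watchConfig = (configPath: string, resync: (path: string) => void) => {
    if (!isRemotePath(configPath)) {
        watch(configPath, () => {
            // In the real code this logs via logger.info and calls syncDeclarativeConfig.
            resync(configPath);
        });
    }
};
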
@@ -237,7 +240,7 @@ const initSingleTenancy = async () => {
 const initMultiTenancy = async () => {
     const hasMultiTenancyEntitlement = hasEntitlement("multi-tenancy");
     if (!hasMultiTenancyEntitlement) {
-        console.error(`SOURCEBOT_TENANCY_MODE is set to ${env.SOURCEBOT_TENANCY_MODE} but your license doesn't have multi-tenancy entitlement. Please contact ${SOURCEBOT_SUPPORT_EMAIL} to request a license upgrade.`);
+        logger.error(`SOURCEBOT_TENANCY_MODE is set to ${env.SOURCEBOT_TENANCY_MODE} but your license doesn't have multi-tenancy entitlement. Please contact ${SOURCEBOT_SUPPORT_EMAIL} to request a license upgrade.`);
         process.exit(1);
     }
 }

@@ -1,6 +1,12 @@
 import { NewsItem } from "./types";

 export const newsData: NewsItem[] = [
+    {
+        unique_id: "structured-logging",
+        header: "Structured logging",
+        sub_header: "We've added support for structured logging",
+        url: "https://docs.sourcebot.dev/docs/configuration/structured-logging"
+    },
     {
         unique_id: "code-nav",
         header: "Code navigation",

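./types is not part of this diff, but the fields used by the new entry imply a NewsItem shape along these lines (inferred, not verbatim):

export interface NewsItem {
    unique_id: string;
    header: string;
    sub_header: string;
    url: string;
}
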
yarn.lock (24 changes)

@@ -5759,8 +5759,6 @@ __metadata:
   resolution: "@sourcebot/backend@workspace:packages/backend"
   dependencies:
     "@gitbeaker/rest": "npm:^40.5.1"
-    "@logtail/node": "npm:^0.5.2"
-    "@logtail/winston": "npm:^0.5.2"
     "@octokit/rest": "npm:^21.0.2"
     "@sentry/cli": "npm:^2.42.2"
     "@sentry/node": "npm:^9.3.0"

@@ -5768,6 +5766,7 @@ __metadata:
     "@sourcebot/crypto": "workspace:*"
     "@sourcebot/db": "workspace:*"
     "@sourcebot/error": "workspace:*"
+    "@sourcebot/logger": "workspace:*"
     "@sourcebot/schemas": "workspace:*"
     "@t3-oss/env-core": "npm:^0.12.0"
     "@types/argparse": "npm:^2.0.16"

@@ -5796,7 +5795,6 @@ __metadata:
     tsx: "npm:^4.19.1"
     typescript: "npm:^5.6.2"
     vitest: "npm:^2.1.9"
-    winston: "npm:^3.15.0"
     zod: "npm:^3.24.3"
   languageName: unknown
   linkType: soft

@@ -5816,6 +5814,7 @@ __metadata:
   resolution: "@sourcebot/db@workspace:packages/db"
   dependencies:
     "@prisma/client": "npm:6.2.1"
+    "@sourcebot/logger": "workspace:*"
     "@types/argparse": "npm:^2.0.16"
     "@types/readline-sync": "npm:^1.4.8"
     argparse: "npm:^2.0.1"

@@ -5835,6 +5834,22 @@ __metadata:
   languageName: unknown
   linkType: soft

+"@sourcebot/logger@workspace:*, @sourcebot/logger@workspace:packages/logger":
+  version: 0.0.0-use.local
+  resolution: "@sourcebot/logger@workspace:packages/logger"
+  dependencies:
+    "@logtail/node": "npm:^0.5.2"
+    "@logtail/winston": "npm:^0.5.2"
+    "@t3-oss/env-core": "npm:^0.12.0"
+    "@types/node": "npm:^22.7.5"
+    dotenv: "npm:^16.4.5"
+    triple-beam: "npm:^1.4.1"
+    typescript: "npm:^5.7.3"
+    winston: "npm:^3.15.0"
+    zod: "npm:^3.24.3"
+  languageName: unknown
+  linkType: soft
+
 "@sourcebot/mcp@workspace:packages/mcp":
   version: 0.0.0-use.local
   resolution: "@sourcebot/mcp@workspace:packages/mcp"

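The new package's dependency list above (@t3-oss/env-core, zod, dotenv, plus the Logtail transports) suggests it validates its own environment before wiring transports. A sketch under those assumptions — both variable names below are hypothetical, not taken from this diff:

import { createEnv } from "@t3-oss/env-core";
import { z } from "zod";
import dotenv from "dotenv";

dotenv.config();

export const env = createEnv({
    server: {
        SOURCEBOT_LOG_LEVEL: z.enum(["debug", "info", "warn", "error"]).default("info"),
        LOGTAIL_TOKEN: z.string().optional(), // if set, a Logtail transport could be attached
    },
    runtimeEnv: process.env,
    emptyStringAsUndefined: true,
});
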
@@ -5933,6 +5948,7 @@ __metadata:
     "@sourcebot/crypto": "workspace:*"
     "@sourcebot/db": "workspace:*"
     "@sourcebot/error": "workspace:*"
+    "@sourcebot/logger": "workspace:*"
     "@sourcebot/schemas": "workspace:*"
     "@ssddanbrown/codemirror-lang-twig": "npm:^1.0.0"
     "@stripe/react-stripe-js": "npm:^3.1.1"

@@ -15525,7 +15541,7 @@ __metadata:
   languageName: node
   linkType: hard

-"triple-beam@npm:^1.3.0":
+"triple-beam@npm:^1.3.0, triple-beam@npm:^1.4.1":
   version: 1.4.1
   resolution: "triple-beam@npm:1.4.1"
   checksum: 10c0/4bf1db71e14fe3ff1c3adbe3c302f1fdb553b74d7591a37323a7badb32dc8e9c290738996cbb64f8b10dc5a3833645b5d8c26221aaaaa12e50d1251c9aba2fea