Mirror of https://github.com/sourcebot-dev/sourcebot.git (synced 2025-12-12 04:15:30 +00:00)

Merge branch 'main' into ghes-review-agent

Commit ad56a07849. 53 changed files with 938 additions and 301 deletions.
CHANGELOG.md (18 lines changed)

@@ -7,10 +7,26 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0


 ## [Unreleased]

+### Fixed
+- Fixed spurious infinite loads with explore panel, file tree, and file search command. [#617](https://github.com/sourcebot-dev/sourcebot/pull/617)
+- Wipe search context on init if entitlement no longer exists [#618](https://github.com/sourcebot-dev/sourcebot/pull/618)
+- Fixed review agent so that it works with GHES instances [#611](https://github.com/sourcebot-dev/sourcebot/pull/611)
+
+## [4.9.2] - 2025-11-13
+
+### Changed
+- Bumped the default requested search result count from 5k to 10k after optimization pass. [#615](https://github.com/sourcebot-dev/sourcebot/pull/615)
+
 ### Fixed
 - Fixed incorrect shutdown of PostHog SDK in the worker. [#609](https://github.com/sourcebot-dev/sourcebot/pull/609)
 - Fixed race condition in job schedulers. [#607](https://github.com/sourcebot-dev/sourcebot/pull/607)
-- Fixed review agent so that it works with GHES instances [#611](https://github.com/sourcebot-dev/sourcebot/pull/611)
+- Fixed connection sync jobs getting stuck in pending or in progress after restarting the worker. [#612](https://github.com/sourcebot-dev/sourcebot/pull/612)
+- Fixed issue where connections would always sync on startup, regardless of whether they changed or not. [#613](https://github.com/sourcebot-dev/sourcebot/pull/613)
+- Fixed performance bottleneck in the search API. The result is an order of magnitude improvement to average search time according to benchmarks. [#615](https://github.com/sourcebot-dev/sourcebot/pull/615)
+
+### Added
+- Added force resync buttons for connections and repositories. [#610](https://github.com/sourcebot-dev/sourcebot/pull/610)
+- Added environment variable to configure the default search result count. [#616](https://github.com/sourcebot-dev/sourcebot/pull/616)

 ## [4.9.1] - 2025-11-07
@@ -34,6 +34,7 @@ The following environment variables allow you to configure your Sourcebot deploy
 | `SOURCEBOT_STRUCTURED_LOGGING_ENABLED` | `false` | <p>Enables/disable structured JSON logging. See [this doc](/docs/configuration/structured-logging) for more info.</p> |
 | `SOURCEBOT_STRUCTURED_LOGGING_FILE` | - | <p>Optional file to log to if structured logging is enabled</p> |
 | `SOURCEBOT_TELEMETRY_DISABLED` | `false` | <p>Enables/disables telemetry collection in Sourcebot. See [this doc](/docs/overview.mdx#telemetry) for more info.</p> |
+| `DEFAULT_MAX_MATCH_COUNT` | `10000` | <p>The default maximum number of search results to return when using search in the web app.</p> |

 ### Enterprise Environment Variables
 | Variable | Default | Description |
@@ -40,6 +40,8 @@
     "cross-fetch": "^4.0.0",
     "dotenv": "^16.4.5",
     "express": "^4.21.2",
+    "express-async-errors": "^3.1.1",
+    "fast-deep-equal": "^3.1.3",
     "git-url-parse": "^16.1.0",
     "gitea-js": "^1.22.0",
     "glob": "^11.0.0",
packages/backend/src/api.ts (new file, 103 lines)

@@ -0,0 +1,103 @@
+import { PrismaClient, RepoIndexingJobType } from '@sourcebot/db';
+import { createLogger } from '@sourcebot/shared';
+import express, { Request, Response } from 'express';
+import 'express-async-errors';
+import * as http from "http";
+import z from 'zod';
+import { ConnectionManager } from './connectionManager.js';
+import { PromClient } from './promClient.js';
+import { RepoIndexManager } from './repoIndexManager.js';
+
+const logger = createLogger('api');
+const PORT = 3060;
+
+export class Api {
+    private server: http.Server;
+
+    constructor(
+        promClient: PromClient,
+        private prisma: PrismaClient,
+        private connectionManager: ConnectionManager,
+        private repoIndexManager: RepoIndexManager,
+    ) {
+        const app = express();
+        app.use(express.json());
+        app.use(express.urlencoded({ extended: true }));
+
+        // Prometheus metrics endpoint
+        app.use('/metrics', async (_req: Request, res: Response) => {
+            res.set('Content-Type', promClient.registry.contentType);
+            const metrics = await promClient.registry.metrics();
+            res.end(metrics);
+        });
+
+        app.post('/api/sync-connection', this.syncConnection.bind(this));
+        app.post('/api/index-repo', this.indexRepo.bind(this));
+
+        this.server = app.listen(PORT, () => {
+            logger.info(`API server is running on port ${PORT}`);
+        });
+    }
+
+    private async syncConnection(req: Request, res: Response) {
+        const schema = z.object({
+            connectionId: z.number(),
+        }).strict();
+
+        const parsed = schema.safeParse(req.body);
+        if (!parsed.success) {
+            res.status(400).json({ error: parsed.error.message });
+            return;
+        }
+
+        const { connectionId } = parsed.data;
+        const connection = await this.prisma.connection.findUnique({
+            where: {
+                id: connectionId,
+            }
+        });
+
+        if (!connection) {
+            res.status(404).json({ error: 'Connection not found' });
+            return;
+        }
+
+        const [jobId] = await this.connectionManager.createJobs([connection]);
+
+        res.status(200).json({ jobId });
+    }
+
+    private async indexRepo(req: Request, res: Response) {
+        const schema = z.object({
+            repoId: z.number(),
+        }).strict();
+
+        const parsed = schema.safeParse(req.body);
+        if (!parsed.success) {
+            res.status(400).json({ error: parsed.error.message });
+            return;
+        }
+
+        const { repoId } = parsed.data;
+        const repo = await this.prisma.repo.findUnique({
+            where: { id: repoId },
+        });
+
+        if (!repo) {
+            res.status(404).json({ error: 'Repo not found' });
+            return;
+        }
+
+        const [jobId] = await this.repoIndexManager.createJobs([repo], RepoIndexingJobType.INDEX);
+        res.status(200).json({ jobId });
+    }
+
+    public async dispose() {
+        return new Promise<void>((resolve, reject) => {
+            this.server.close((err) => {
+                if (err) reject(err);
+                else resolve(undefined);
+            });
+        });
+    }
+}
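A minimal sketch of how another process might call the two endpoints added above. The paths and request bodies follow the zod schemas in the new file; the host, the use of the hard-coded port 3060, and the error handling shown here are assumptions for illustration only.

// Hypothetical client for the worker API sketched above (not part of the diff).
const WORKER_URL = 'http://localhost:3060';

async function triggerConnectionSync(connectionId: number): Promise<string> {
    const res = await fetch(`${WORKER_URL}/api/sync-connection`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ connectionId }),
    });
    if (!res.ok) {
        throw new Error(`sync-connection failed with status ${res.status}`);
    }
    // On success the handler responds with { jobId }.
    const { jobId } = await res.json() as { jobId: string };
    return jobId;
}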
@@ -6,6 +6,7 @@ import chokidar, { FSWatcher } from 'chokidar';
 import { ConnectionManager } from "./connectionManager.js";
 import { SINGLE_TENANT_ORG_ID } from "./constants.js";
 import { syncSearchContexts } from "./ee/syncSearchContexts.js";
+import isEqual from 'fast-deep-equal';

 const logger = createLogger('config-manager');

@@ -64,8 +65,8 @@ export class ConfigManager {

         const existingConnectionConfig = existingConnection ? existingConnection.config as unknown as ConnectionConfig : undefined;
         const connectionNeedsSyncing =
-            !existingConnection ||
-            (JSON.stringify(existingConnectionConfig) !== JSON.stringify(newConnectionConfig));
+            !existingConnectionConfig ||
+            !isEqual(existingConnectionConfig, newConnectionConfig);

         // Either update the existing connection or create a new one.
         const connection = existingConnection ?
@@ -93,8 +94,8 @@ export class ConfigManager {
         });

         if (connectionNeedsSyncing) {
-            const [jobId] = await this.connectionManager.createJobs([connection]);
-            logger.info(`Change detected for connection '${key}' (id: ${connection.id}). Created sync job ${jobId}.`);
+            logger.info(`Change detected for connection '${key}' (id: ${connection.id}). Creating sync job.`);
+            await this.connectionManager.createJobs([connection]);
         }
     }
 }
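The switch from comparing JSON.stringify output to fast-deep-equal matters because string comparison is sensitive to property order, which can make unchanged configs look changed. A small illustrative sketch (the config values here are made up):

import isEqual from 'fast-deep-equal';

// Two logically identical connection configs whose properties are declared in a different order.
const a = { type: 'github', url: 'https://github.example.com', repos: ['org/repo'] };
const b = { repos: ['org/repo'], url: 'https://github.example.com', type: 'github' };

console.log(JSON.stringify(a) === JSON.stringify(b)); // false: serialization depends on key order
console.log(isEqual(a, b));                           // true: structural comparison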
@@ -11,10 +11,12 @@ import { groupmqLifecycleExceptionWrapper, setIntervalAsync } from "./utils.js";
 import { syncSearchContexts } from "./ee/syncSearchContexts.js";
 import { captureEvent } from "./posthog.js";
 import { PromClient } from "./promClient.js";
+import { GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS } from "./constants.js";

 const LOG_TAG = 'connection-manager';
 const logger = createLogger(LOG_TAG);
 const createJobLogger = (jobId: string) => createLogger(`${LOG_TAG}:job:${jobId}`);
+const QUEUE_NAME = 'connection-sync-queue';

 type JobPayload = {
     jobId: string,
@@ -30,19 +32,19 @@ type JobResult = {
 const JOB_TIMEOUT_MS = 1000 * 60 * 60 * 2; // 2 hour timeout

 export class ConnectionManager {
-    private worker: Worker;
+    private worker: Worker<JobPayload>;
     private queue: Queue<JobPayload>;
     private interval?: NodeJS.Timeout;

     constructor(
         private db: PrismaClient,
         private settings: Settings,
-        redis: Redis,
+        private redis: Redis,
         private promClient: PromClient,
     ) {
         this.queue = new Queue<JobPayload>({
             redis,
-            namespace: 'connection-sync-queue',
+            namespace: QUEUE_NAME,
             jobTimeoutMs: JOB_TIMEOUT_MS,
             maxAttempts: 3,
             logger: env.DEBUG_ENABLE_GROUPMQ_LOGGING === 'true',
@@ -62,6 +64,10 @@ export class ConnectionManager {
         this.worker.on('failed', this.onJobFailed.bind(this));
         this.worker.on('stalled', this.onJobStalled.bind(this));
         this.worker.on('error', this.onWorkerError.bind(this));
+        // graceful-timeout is triggered when a job is still processing after
+        // worker.close() is called and the timeout period has elapsed. In this case,
+        // we fail the job with no retry.
+        this.worker.on('graceful-timeout', this.onJobGracefulTimeout.bind(this));
     }

     public startScheduler() {
@@ -128,6 +134,7 @@ export class ConnectionManager {
         });

         for (const job of jobs) {
+            logger.info(`Scheduling job ${job.id} for connection ${job.connection.name} (id: ${job.connectionId})`);
             await this.queue.add({
                 groupId: `connection:${job.connectionId}`,
                 data: {
@@ -150,6 +157,22 @@ export class ConnectionManager {
         const logger = createJobLogger(jobId);
         logger.info(`Running connection sync job ${jobId} for connection ${connectionName} (id: ${job.data.connectionId}) (attempt ${job.attempts + 1} / ${job.maxAttempts})`);

+        const currentStatus = await this.db.connectionSyncJob.findUniqueOrThrow({
+            where: {
+                id: jobId,
+            },
+            select: {
+                status: true,
+            }
+        });
+
+        // Fail safe: if the job is not PENDING (first run) or IN_PROGRESS (retry), it indicates the job
+        // is in an invalid state and should be skipped.
+        if (currentStatus.status !== ConnectionSyncJobStatus.PENDING && currentStatus.status !== ConnectionSyncJobStatus.IN_PROGRESS) {
+            throw new Error(`Job ${jobId} is not in a valid state. Expected: ${ConnectionSyncJobStatus.PENDING} or ${ConnectionSyncJobStatus.IN_PROGRESS}. Actual: ${currentStatus.status}. Skipping.`);
+        }
+
         this.promClient.pendingConnectionSyncJobs.dec({ connection: connectionName });
         this.promClient.activeConnectionSyncJobs.inc({ connection: connectionName });

@@ -178,7 +201,7 @@ export class ConnectionManager {
         const result = await (async () => {
             switch (config.type) {
                 case 'github': {
-                    return await compileGithubConfig(config, job.data.connectionId, abortController);
+                    return await compileGithubConfig(config, job.data.connectionId, abortController.signal);
                 }
                 case 'gitlab': {
                     return await compileGitlabConfig(config, job.data.connectionId);
@@ -200,7 +223,7 @@ export class ConnectionManager {
             }
         })();

         let { repoData, warnings } = result;

         await this.db.connectionSyncJob.update({
@@ -383,6 +406,33 @@ export class ConnectionManager {
             });
         });

+    private onJobGracefulTimeout = async (job: Job<JobPayload>) =>
+        groupmqLifecycleExceptionWrapper('onJobGracefulTimeout', logger, async () => {
+            const logger = createJobLogger(job.id);
+
+            const { connection } = await this.db.connectionSyncJob.update({
+                where: { id: job.id },
+                data: {
+                    status: ConnectionSyncJobStatus.FAILED,
+                    completedAt: new Date(),
+                    errorMessage: 'Job timed out',
+                },
+                select: {
+                    connection: true,
+                }
+            });
+
+            this.promClient.activeConnectionSyncJobs.dec({ connection: connection.name });
+            this.promClient.connectionSyncJobFailTotal.inc({ connection: connection.name });
+
+            logger.error(`Job ${job.id} timed out for connection ${connection.name} (id: ${connection.id})`);
+
+            captureEvent('backend_connection_sync_job_failed', {
+                connectionId: connection.id,
+                error: 'Job timed out',
+            });
+        });
+
     private async onWorkerError(error: Error) {
         Sentry.captureException(error);
         logger.error(`Connection syncer worker error.`, error);
@@ -392,8 +442,28 @@ export class ConnectionManager {
         if (this.interval) {
             clearInterval(this.interval);
         }
-        await this.worker.close();
-        await this.queue.close();
+        const inProgressJobs = this.worker.getCurrentJobs();
+        await this.worker.close(GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS);
+
+        // Manually release group locks for in progress jobs to prevent deadlocks.
+        // @see: https://github.com/Openpanel-dev/groupmq/issues/8
+        for (const { job } of inProgressJobs) {
+            const lockKey = `groupmq:${QUEUE_NAME}:lock:${job.groupId}`;
+            logger.debug(`Releasing group lock ${lockKey} for in progress job ${job.id}`);
+            try {
+                await this.redis.del(lockKey);
+            } catch (error) {
+                Sentry.captureException(error);
+                logger.error(`Failed to release group lock ${lockKey} for in progress job ${job.id}. Error: `, error);
+            }
+        }
+
+        // @note: As of groupmq v1.0.0, queue.close() will just close the underlying
+        // redis connection. Since we share the same redis client, we skip this
+        // step and close the redis client directly in index.ts.
+        // @see: https://github.com/Openpanel-dev/groupmq/blob/main/src/queue.ts#L1900
+        // await this.queue.close();
     }
 }
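Both the connection sync worker above and the repo index worker further down now re-read the job's status from the database before running it. A condensed, illustrative sketch of that guard as a standalone helper follows; it is not part of the diff, and it assumes the ConnectionSyncJobStatus enum is exported from @sourcebot/db alongside the Prisma client, as its usage above suggests.

import { ConnectionSyncJobStatus, PrismaClient } from '@sourcebot/db';

// Re-read the job's status and bail out unless it is in a state the worker can
// resume from (PENDING on the first run, IN_PROGRESS on a retry).
async function assertConnectionSyncJobIsRunnable(prisma: PrismaClient, jobId: string): Promise<void> {
    const { status } = await prisma.connectionSyncJob.findUniqueOrThrow({
        where: { id: jobId },
        select: { status: true },
    });

    const allowed: ConnectionSyncJobStatus[] = [
        ConnectionSyncJobStatus.PENDING,
        ConnectionSyncJobStatus.IN_PROGRESS,
    ];
    if (!allowed.includes(status)) {
        throw new Error(`Job ${jobId} is in unexpected state ${status}; skipping.`);
    }
}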
@@ -10,4 +10,24 @@ export const PERMISSION_SYNC_SUPPORTED_CODE_HOST_TYPES: CodeHostType[] = [
 ];

 export const REPOS_CACHE_DIR = path.join(env.DATA_CACHE_DIR, 'repos');
 export const INDEX_CACHE_DIR = path.join(env.DATA_CACHE_DIR, 'index');
+
+// Maximum time to wait for current job to finish
+export const GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS = 5 * 1000; // 5 seconds
+
+// List of shutdown signals
+export const SHUTDOWN_SIGNALS: string[] = [
+    'SIGHUP',
+    'SIGINT',
+    'SIGQUIT',
+    'SIGILL',
+    'SIGTRAP',
+    'SIGABRT',
+    'SIGBUS',
+    'SIGFPE',
+    'SIGSEGV',
+    'SIGUSR2',
+    'SIGTERM',
+    // @note: SIGKILL and SIGSTOP cannot have listeners installed.
+    // @see: https://nodejs.org/api/process.html#signal-events
+];
@@ -1,20 +1,22 @@
 import "./instrument.js";

+import * as Sentry from "@sentry/node";
 import { PrismaClient } from "@sourcebot/db";
-import { createLogger } from "@sourcebot/shared";
-import { env, getConfigSettings, hasEntitlement, getDBConnectionString } from '@sourcebot/shared';
+import { createLogger, env, getConfigSettings, getDBConnectionString, hasEntitlement } from "@sourcebot/shared";
+import 'express-async-errors';
 import { existsSync } from 'fs';
 import { mkdir } from 'fs/promises';
 import { Redis } from 'ioredis';
+import { Api } from "./api.js";
 import { ConfigManager } from "./configManager.js";
 import { ConnectionManager } from './connectionManager.js';
-import { INDEX_CACHE_DIR, REPOS_CACHE_DIR } from './constants.js';
+import { INDEX_CACHE_DIR, REPOS_CACHE_DIR, SHUTDOWN_SIGNALS } from './constants.js';
+import { AccountPermissionSyncer } from "./ee/accountPermissionSyncer.js";
 import { GithubAppManager } from "./ee/githubAppManager.js";
 import { RepoPermissionSyncer } from './ee/repoPermissionSyncer.js';
-import { AccountPermissionSyncer } from "./ee/accountPermissionSyncer.js";
+import { shutdownPosthog } from "./posthog.js";
 import { PromClient } from './promClient.js';
 import { RepoIndexManager } from "./repoIndexManager.js";
-import { shutdownPosthog } from "./posthog.js";


 const logger = createLogger('backend-entrypoint');

@@ -40,13 +42,14 @@ const prisma = new PrismaClient({
 const redis = new Redis(env.REDIS_URL, {
     maxRetriesPerRequest: null
 });
-redis.ping().then(() => {
+try {
+    await redis.ping();
     logger.info('Connected to redis');
-}).catch((err: unknown) => {
-    logger.error('Failed to connect to redis');
-    logger.error(err);
+} catch (err: unknown) {
+    logger.error('Failed to connect to redis. Error:', err);
     process.exit(1);
-});
+}

 const promClient = new PromClient();

@@ -74,47 +77,74 @@ else if (env.EXPERIMENT_EE_PERMISSION_SYNC_ENABLED === 'true' && hasEntitlement(
     accountPermissionSyncer.startScheduler();
 }

+const api = new Api(
+    promClient,
+    prisma,
+    connectionManager,
+    repoIndexManager,
+);
+
 logger.info('Worker started.');

-const cleanup = async (signal: string) => {
-    logger.info(`Received ${signal}, cleaning up...`);
-
-    const shutdownTimeout = 30000; // 30 seconds
-
-    try {
-        await Promise.race([
-            Promise.all([
-                repoIndexManager.dispose(),
-                connectionManager.dispose(),
-                repoPermissionSyncer.dispose(),
-                accountPermissionSyncer.dispose(),
-                promClient.dispose(),
-                configManager.dispose(),
-            ]),
-            new Promise((_, reject) =>
-                setTimeout(() => reject(new Error('Shutdown timeout')), shutdownTimeout)
-            )
-        ]);
-        logger.info('All workers shut down gracefully');
-    } catch (error) {
-        logger.warn('Shutdown timeout or error, forcing exit:', error instanceof Error ? error.message : String(error));
-    }
-
-    await prisma.$disconnect();
-    await redis.quit();
-    await shutdownPosthog();
-}
-
-process.on('SIGINT', () => cleanup('SIGINT').finally(() => process.exit(0)));
-process.on('SIGTERM', () => cleanup('SIGTERM').finally(() => process.exit(0)));
-
-// Register handlers for uncaught exceptions and unhandled rejections
-process.on('uncaughtException', (err) => {
-    logger.error(`Uncaught exception: ${err.message}`);
-    cleanup('uncaughtException').finally(() => process.exit(1));
-});
-
-process.on('unhandledRejection', (reason, promise) => {
-    logger.error(`Unhandled rejection at: ${promise}, reason: ${reason}`);
-    cleanup('unhandledRejection').finally(() => process.exit(1));
-});
+const listenToShutdownSignals = () => {
+    const signals = SHUTDOWN_SIGNALS;
+
+    let receivedSignal = false;
+
+    const cleanup = async (signal: string) => {
+        try {
+            if (receivedSignal) {
+                logger.debug(`Received repeat signal ${signal}, ignoring.`);
+                return;
+            }
+            receivedSignal = true;
+
+            logger.info(`Received ${signal}, cleaning up...`);
+
+            await repoIndexManager.dispose()
+            await connectionManager.dispose()
+            await repoPermissionSyncer.dispose()
+            await accountPermissionSyncer.dispose()
+            await configManager.dispose()
+
+            await prisma.$disconnect();
+            await redis.quit();
+            await api.dispose();
+            await shutdownPosthog();
+
+            logger.info('All workers shut down gracefully');
+            signals.forEach(sig => process.removeListener(sig, cleanup));
+        } catch (error) {
+            Sentry.captureException(error);
+            logger.error('Error shutting down worker:', error);
+        }
+    }
+
+    signals.forEach(signal => {
+        process.on(signal, (err) => {
+            cleanup(err).finally(() => {
+                process.kill(process.pid, signal);
+            });
+        });
+    });
+
+    // Register handlers for uncaught exceptions and unhandled rejections
+    process.on('uncaughtException', (err) => {
+        logger.error(`Uncaught exception: ${err.message}`);
+        cleanup('uncaughtException').finally(() => {
+            process.exit(1);
+        });
+    });
+
+    process.on('unhandledRejection', (reason, promise) => {
+        logger.error(`Unhandled rejection at: ${promise}, reason: ${reason}`);
+        cleanup('unhandledRejection').finally(() => {
+            process.exit(1);
+        });
+    });
+}
+
+listenToShutdownSignals();
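One detail worth calling out in the new shutdown path: after a successful cleanup the handler removes itself from every signal and then re-sends the received signal with process.kill, so the process terminates via the default disposition for that signal instead of swallowing it. A stripped-down sketch of the pattern follows; cleanup here is a stand-in for the async routine defined in the hunk above, not the actual implementation.

// Stand-in for the real async cleanup routine defined above.
const cleanup = async (signal: string): Promise<void> => {
    console.log(`cleaning up after ${signal}`);
};

const onSignal = (signal: NodeJS.Signals) => {
    void cleanup(signal).finally(() => {
        // Detach the handler, then re-raise the signal so Node's default behaviour
        // terminates the process with the conventional exit status for that signal.
        process.removeListener(signal, onSignal);
        process.kill(process.pid, signal);
    });
};

process.on('SIGTERM', onSignal);
process.on('SIGINT', onSignal);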
@@ -1,14 +1,6 @@
-import express, { Request, Response } from 'express';
-import { Server } from 'http';
 import client, { Registry, Counter, Gauge } from 'prom-client';
-import { createLogger } from "@sourcebot/shared";
-
-const logger = createLogger('prometheus-client');

 export class PromClient {
-    private registry: Registry;
-    private app: express.Application;
-    private server: Server;
+    public registry: Registry;

     public activeRepoIndexJobs: Gauge<string>;
     public pendingRepoIndexJobs: Gauge<string>;
@@ -22,8 +14,6 @@ export class PromClient {
     public connectionSyncJobFailTotal: Counter<string>;
     public connectionSyncJobSuccessTotal: Counter<string>;

-    public readonly PORT = 3060;
-
     constructor() {
         this.registry = new Registry();
@@ -100,26 +90,5 @@ export class PromClient {
         client.collectDefaultMetrics({
             register: this.registry,
         });
-
-        this.app = express();
-        this.app.get('/metrics', async (req: Request, res: Response) => {
-            res.set('Content-Type', this.registry.contentType);
-
-            const metrics = await this.registry.metrics();
-            res.end(metrics);
-        });
-
-        this.server = this.app.listen(this.PORT, () => {
-            logger.info(`Prometheus metrics server is running on port ${this.PORT}`);
-        });
-    }
-
-    async dispose() {
-        return new Promise<void>((resolve, reject) => {
-            this.server.close((err) => {
-                if (err) reject(err);
-                else resolve();
-            });
-        });
     }
 }
@@ -39,8 +39,8 @@ type CompileResult = {
 export const compileGithubConfig = async (
     config: GithubConnectionConfig,
     connectionId: number,
-    abortController: AbortController): Promise<CompileResult> => {
-    const gitHubReposResult = await getGitHubReposFromConfig(config, abortController.signal);
+    signal: AbortSignal): Promise<CompileResult> => {
+    const gitHubReposResult = await getGitHubReposFromConfig(config, signal);
     const gitHubRepos = gitHubReposResult.repos;
     const warnings = gitHubReposResult.warnings;
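The signature change narrows the contract: a function that receives an AbortSignal can only observe cancellation, while the caller keeps the AbortController and decides when to abort. A generic illustration of that split (hypothetical helper, not from the codebase):

// Hypothetical helper that cooperates with cancellation via an AbortSignal.
async function fetchAllPages(signal: AbortSignal): Promise<number[]> {
    const pages: number[] = [];
    for (let page = 0; page < 10; page++) {
        if (signal.aborted) {
            throw new Error('aborted');
        }
        pages.push(page); // real code would fetch here, forwarding { signal }
    }
    return pages;
}

const controller = new AbortController();
const pending = fetchAllPages(controller.signal);
controller.abort(); // only the owner of the controller can trigger cancellation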
@@ -7,7 +7,7 @@ import { readdir, rm } from 'fs/promises';
 import { Job, Queue, ReservedJob, Worker } from "groupmq";
 import { Redis } from 'ioredis';
 import micromatch from 'micromatch';
-import { INDEX_CACHE_DIR } from './constants.js';
+import { GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS, INDEX_CACHE_DIR } from './constants.js';
 import { cloneRepository, fetchRepository, getBranches, getCommitHashForRefName, getTags, isPathAValidGitRepoRoot, unsetGitConfig, upsertGitConfig } from './git.js';
 import { captureEvent } from './posthog.js';
 import { PromClient } from './promClient.js';
@@ -45,7 +45,7 @@ export class RepoIndexManager {
     constructor(
         private db: PrismaClient,
         private settings: Settings,
-        redis: Redis,
+        private redis: Redis,
         private promClient: PromClient,
     ) {
         this.queue = new Queue<JobPayload>({
@@ -70,6 +70,10 @@ export class RepoIndexManager {
         this.worker.on('failed', this.onJobFailed.bind(this));
         this.worker.on('stalled', this.onJobStalled.bind(this));
         this.worker.on('error', this.onWorkerError.bind(this));
+        // graceful-timeout is triggered when a job is still processing after
+        // worker.close() is called and the timeout period has elapsed. In this case,
+        // we fail the job with no retry.
+        this.worker.on('graceful-timeout', this.onJobGracefulTimeout.bind(this));
     }

     public startScheduler() {
@@ -192,7 +196,7 @@ export class RepoIndexManager {
         }
     }

-    private async createJobs(repos: Repo[], type: RepoIndexingJobType) {
+    public async createJobs(repos: Repo[], type: RepoIndexingJobType) {
         // @note: we don't perform this in a transaction because
         // we want to avoid the situation where a job is created and run
         // prior to the transaction being committed.
@@ -221,6 +225,8 @@ export class RepoIndexManager {
             const jobTypeLabel = getJobTypePrometheusLabel(type);
             this.promClient.pendingRepoIndexJobs.inc({ repo: job.repo.name, type: jobTypeLabel });
         }
+
+        return jobs.map(job => job.id);
     }

     private async runJob(job: ReservedJob<JobPayload>) {
@@ -228,6 +234,23 @@ export class RepoIndexManager {
         const logger = createJobLogger(id);
         logger.info(`Running ${job.data.type} job ${id} for repo ${job.data.repoName} (id: ${job.data.repoId}) (attempt ${job.attempts + 1} / ${job.maxAttempts})`);

+        const currentStatus = await this.db.repoIndexingJob.findUniqueOrThrow({
+            where: {
+                id,
+            },
+            select: {
+                status: true,
+            }
+        });
+
+        // Fail safe: if the job is not PENDING (first run) or IN_PROGRESS (retry), it indicates the job
+        // is in an invalid state and should be skipped.
+        if (
+            currentStatus.status !== RepoIndexingJobStatus.PENDING &&
+            currentStatus.status !== RepoIndexingJobStatus.IN_PROGRESS
+        ) {
+            throw new Error(`Job ${id} is not in a valid state. Expected: ${RepoIndexingJobStatus.PENDING} or ${RepoIndexingJobStatus.IN_PROGRESS}. Actual: ${currentStatus.status}. Skipping.`);
+        }
+
         const { repo, type: jobType } = await this.db.repoIndexingJob.update({
             where: {
@@ -538,6 +561,28 @@ export class RepoIndexManager {
             logger.error(`Job ${jobId} stalled for repo ${repo.name} (id: ${repo.id})`);
         });

+    private onJobGracefulTimeout = async (job: Job<JobPayload>) =>
+        groupmqLifecycleExceptionWrapper('onJobGracefulTimeout', logger, async () => {
+            const logger = createJobLogger(job.data.jobId);
+            const jobTypeLabel = getJobTypePrometheusLabel(job.data.type);
+
+            const { repo } = await this.db.repoIndexingJob.update({
+                where: { id: job.data.jobId },
+                data: {
+                    status: RepoIndexingJobStatus.FAILED,
+                    completedAt: new Date(),
+                    errorMessage: 'Job timed out',
+                },
+                select: { repo: true }
+            });
+
+            this.promClient.activeRepoIndexJobs.dec({ repo: job.data.repoName, type: jobTypeLabel });
+            this.promClient.repoIndexJobFailTotal.inc({ repo: job.data.repoName, type: jobTypeLabel });
+
+            logger.error(`Job ${job.data.jobId} timed out for repo ${repo.name} (id: ${repo.id}). Failing job.`);
+        });
+
     private async onWorkerError(error: Error) {
         Sentry.captureException(error);
         logger.error(`Index syncer worker error.`, error);
@@ -547,8 +592,20 @@ export class RepoIndexManager {
         if (this.interval) {
             clearInterval(this.interval);
         }
-        await this.worker.close();
-        await this.queue.close();
+        const inProgressJobs = this.worker.getCurrentJobs();
+        await this.worker.close(GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS);
+        // Manually release group locks for in progress jobs to prevent deadlocks.
+        // @see: https://github.com/Openpanel-dev/groupmq/issues/8
+        for (const { job } of inProgressJobs) {
+            const lockKey = `groupmq:repo-index-queue:lock:${job.groupId}`;
+            logger.debug(`Releasing group lock ${lockKey} for in progress job ${job.id}`);
+            await this.redis.del(lockKey);
+        }
+
+        // @note: As of groupmq v1.0.0, queue.close() will just close the underlying
+        // redis connection. Since we share the same redis client, we skip this
+        // step and close the redis client directly in index.ts.
+        // @see: https://github.com/Openpanel-dev/groupmq/blob/main/src/queue.ts#L1900
+        // await this.queue.close();
     }
 }
@@ -216,6 +216,9 @@ export const env = createEnv({
         SOURCEBOT_LOG_LEVEL: z.enum(["info", "debug", "warn", "error"]).default("info"),
         SOURCEBOT_STRUCTURED_LOGGING_ENABLED: booleanSchema.default("false"),
         SOURCEBOT_STRUCTURED_LOGGING_FILE: z.string().optional(),
+
+        // Configure the default maximum number of search results to return by default.
+        DEFAULT_MAX_MATCH_COUNT: numberSchema.default(10_000),
     },
     runtimeEnv,
     emptyStringAsUndefined: true,
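For context on how the new setting is consumed: the parsed value surfaces on the shared env object and is passed to the search page later in this diff. A trivial sketch of reading it (the override value in the comment is hypothetical):

import { env } from "@sourcebot/shared";

// Resolves to 10000 unless DEFAULT_MAX_MATCH_COUNT is set in the environment,
// e.g. DEFAULT_MAX_MATCH_COUNT=20000 in the deployment config.
const defaultMaxMatchCount: number = env.DEFAULT_MAX_MATCH_COUNT;
console.log(`Default max match count: ${defaultMaxMatchCount}`);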
@@ -1,10 +1,10 @@
 import { getRepoInfoByName } from "@/actions";
 import { PathHeader } from "@/app/[domain]/components/pathHeader";
 import { Separator } from "@/components/ui/separator";
-import { getFileSource } from "@/features/search/fileSourceApi";
 import { cn, getCodeHostInfoForRepo, isServiceError } from "@/lib/utils";
 import Image from "next/image";
 import { PureCodePreviewPanel } from "./pureCodePreviewPanel";
+import { getFileSource } from "@/features/search/fileSourceApi";

 interface CodePreviewPanelProps {
     path: string;
@@ -1,12 +1,12 @@
 'use client';

 import { useRef } from "react";
-import { FileTreeItem } from "@/features/fileTree/actions";
 import { FileTreeItemComponent } from "@/features/fileTree/components/fileTreeItemComponent";
 import { getBrowsePath } from "../../hooks/utils";
 import { ScrollArea } from "@/components/ui/scroll-area";
 import { useBrowseParams } from "../../hooks/useBrowseParams";
 import { useDomain } from "@/hooks/useDomain";
+import { FileTreeItem } from "@/features/fileTree/types";

 interface PureTreePreviewPanelProps {
     items: FileTreeItem[];
@@ -2,7 +2,7 @@
 import { Separator } from "@/components/ui/separator";
 import { getRepoInfoByName } from "@/actions";
 import { PathHeader } from "@/app/[domain]/components/pathHeader";
-import { getFolderContents } from "@/features/fileTree/actions";
+import { getFolderContents } from "@/features/fileTree/api";
 import { isServiceError } from "@/lib/utils";
 import { PureTreePreviewPanel } from "./pureTreePreviewPanel";
@@ -5,7 +5,6 @@ import { useState, useRef, useMemo, useEffect, useCallback } from "react";
 import { useHotkeys } from "react-hotkeys-hook";
 import { useQuery } from "@tanstack/react-query";
 import { unwrapServiceError } from "@/lib/utils";
-import { FileTreeItem, getFiles } from "@/features/fileTree/actions";
 import { Dialog, DialogContent, DialogDescription, DialogTitle } from "@/components/ui/dialog";
 import { useBrowseNavigation } from "../hooks/useBrowseNavigation";
 import { useBrowseState } from "../hooks/useBrowseState";
@@ -13,6 +12,8 @@ import { useBrowseParams } from "../hooks/useBrowseParams";
 import { FileTreeItemIcon } from "@/features/fileTree/components/fileTreeItemIcon";
 import { useLocalStorage } from "usehooks-ts";
 import { Skeleton } from "@/components/ui/skeleton";
+import { FileTreeItem } from "@/features/fileTree/types";
+import { getFiles } from "@/app/api/(client)/client";

 const MAX_RESULTS = 100;
@@ -55,7 +55,7 @@ export const useSuggestionsData = ({
             query: `file:${suggestionQuery}`,
             matches: 15,
             contextLines: 1,
-        }, domain),
+        }),
         select: (data): Suggestion[] => {
             if (isServiceError(data)) {
                 return [];
@@ -75,7 +75,7 @@ export const useSuggestionsData = ({
             query: `sym:${suggestionQuery.length > 0 ? suggestionQuery : ".*"}`,
             matches: 15,
             contextLines: 1,
-        }, domain),
+        }),
         select: (data): Suggestion[] => {
             if (isServiceError(data)) {
                 return [];
@@ -1,4 +1,4 @@
-import { sew } from "@/actions"
+import { getCurrentUserRole, sew } from "@/actions"
 import { Badge } from "@/components/ui/badge"
 import { Button } from "@/components/ui/button"
 import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card"
@@ -19,6 +19,7 @@ import { BackButton } from "../../components/backButton"
 import { DisplayDate } from "../../components/DisplayDate"
 import { RepoBranchesTable } from "../components/repoBranchesTable"
 import { RepoJobsTable } from "../components/repoJobsTable"
+import { OrgRole } from "@sourcebot/db"

 export default async function RepoDetailPage({ params }: { params: Promise<{ id: string }> }) {
     const { id } = await params
@@ -51,6 +52,11 @@ export default async function RepoDetailPage({ params }: { params: Promise<{ id:
     const repoMetadata = repoMetadataSchema.parse(repo.metadata);

+    const userRole = await getCurrentUserRole(SINGLE_TENANT_ORG_DOMAIN);
+    if (isServiceError(userRole)) {
+        throw new ServiceErrorException(userRole);
+    }
+
     return (
         <>
             <div className="mb-6">
@@ -172,7 +178,11 @@ export default async function RepoDetailPage({ params }: { params: Promise<{ id:
                 </CardHeader>
                 <CardContent>
                     <Suspense fallback={<Skeleton className="h-96 w-full" />}>
-                        <RepoJobsTable data={repo.jobs} />
+                        <RepoJobsTable
+                            data={repo.jobs}
+                            repoId={repo.id}
+                            isIndexButtonVisible={userRole === OrgRole.OWNER}
+                        />
                     </Suspense>
                 </CardContent>
             </Card>
@@ -18,7 +18,7 @@ import {
     useReactTable,
 } from "@tanstack/react-table"
 import { cva } from "class-variance-authority"
-import { AlertCircle, ArrowUpDown, RefreshCwIcon } from "lucide-react"
+import { AlertCircle, ArrowUpDown, PlusCircleIcon, RefreshCwIcon } from "lucide-react"
 import * as React from "react"
 import { CopyIconButton } from "../../components/copyIconButton"
 import { useMemo } from "react"
@@ -26,6 +26,9 @@ import { LightweightCodeHighlighter } from "../../components/lightweightCodeHigh
 import { useRouter } from "next/navigation"
 import { useToast } from "@/components/hooks/use-toast"
 import { DisplayDate } from "../../components/DisplayDate"
+import { LoadingButton } from "@/components/ui/loading-button"
+import { indexRepo } from "@/features/workerApi/actions"
+import { isServiceError } from "@/lib/utils"

 // @see: https://v0.app/chat/repo-indexing-status-uhjdDim8OUS
@@ -129,7 +132,7 @@ export const columns: ColumnDef<RepoIndexingJob>[] = [
                 </Button>
             )
         },
-        cell: ({ row }) => <DisplayDate date={row.getValue("createdAt") as Date} className="ml-3"/>,
+        cell: ({ row }) => <DisplayDate date={row.getValue("createdAt") as Date} className="ml-3" />,
     },
     {
         accessorKey: "completedAt",
@@ -147,7 +150,7 @@ export const columns: ColumnDef<RepoIndexingJob>[] = [
                 return "-";
             }

-            return <DisplayDate date={completedAt} className="ml-3"/>
+            return <DisplayDate date={completedAt} className="ml-3" />
         },
     },
     {
@@ -176,13 +179,41 @@ export const columns: ColumnDef<RepoIndexingJob>[] = [
     },
 ]

-export const RepoJobsTable = ({ data }: { data: RepoIndexingJob[] }) => {
+export const RepoJobsTable = ({
+    data,
+    repoId,
+    isIndexButtonVisible,
+}: {
+    data: RepoIndexingJob[],
+    repoId: number,
+    isIndexButtonVisible: boolean,
+}) => {
     const [sorting, setSorting] = React.useState<SortingState>([{ id: "createdAt", desc: true }])
     const [columnFilters, setColumnFilters] = React.useState<ColumnFiltersState>([])
     const [columnVisibility, setColumnVisibility] = React.useState<VisibilityState>({})
     const router = useRouter();
     const { toast } = useToast();

+    const [isIndexSubmitting, setIsIndexSubmitting] = React.useState(false);
+    const onIndexButtonClick = React.useCallback(async () => {
+        setIsIndexSubmitting(true);
+        const response = await indexRepo(repoId);
+
+        if (!isServiceError(response)) {
+            const { jobId } = response;
+            toast({
+                description: `✅ Repository sync triggered successfully. Job ID: ${jobId}`,
+            })
+            router.refresh();
+        } else {
+            toast({
+                description: `❌ Failed to index repository. ${response.message}`,
+            });
+        }
+
+        setIsIndexSubmitting(false);
+    }, [repoId, router, toast]);
+
     const table = useReactTable({
         data,
         columns,
@@ -247,19 +278,31 @@ export const RepoJobsTable = ({ data }: { data: RepoIndexingJob[] }) => {
                     </SelectContent>
                 </Select>

-                <Button
-                    variant="outline"
-                    className="ml-auto"
-                    onClick={() => {
-                        router.refresh();
-                        toast({
-                            description: "Page refreshed",
-                        });
-                    }}
-                >
-                    <RefreshCwIcon className="w-3 h-3" />
-                    Refresh
-                </Button>
+                <div className="ml-auto flex items-center gap-2">
+                    <Button
+                        variant="outline"
+                        onClick={() => {
+                            router.refresh();
+                            toast({
+                                description: "Page refreshed",
+                            });
+                        }}
+                    >
+                        <RefreshCwIcon className="w-3 h-3" />
+                        Refresh
+                    </Button>
+
+                    {isIndexButtonVisible && (
+                        <LoadingButton
+                            onClick={onIndexButtonClick}
+                            loading={isIndexSubmitting}
+                            variant="outline"
+                        >
+                            <PlusCircleIcon className="w-3 h-3" />
+                            Trigger sync
+                        </LoadingButton>
+                    )}
+                </div>
             </div>

             <div className="rounded-md border">
@@ -5,8 +5,8 @@ import { CodePreview } from "./codePreview";
 import { SearchResultFile } from "@/features/search/types";
 import { SymbolIcon } from "@radix-ui/react-icons";
 import { SetStateAction, Dispatch, useMemo } from "react";
-import { getFileSource } from "@/features/search/fileSourceApi";
 import { unwrapServiceError } from "@/lib/utils";
+import { getFileSource } from "@/app/api/(client)/client";

 interface CodePreviewPanelProps {
     previewedFile: SearchResultFile;
@@ -35,14 +35,14 @@ import { FilterPanel } from "./filterPanel";
 import { useFilteredMatches } from "./filterPanel/useFilterMatches";
 import { SearchResultsPanel } from "./searchResultsPanel";

-const DEFAULT_MAX_MATCH_COUNT = 5000;
-
 interface SearchResultsPageProps {
     searchQuery: string;
+    defaultMaxMatchCount: number;
 }

 export const SearchResultsPage = ({
     searchQuery,
+    defaultMaxMatchCount,
 }: SearchResultsPageProps) => {
     const router = useRouter();
     const { setSearchHistory } = useSearchHistory();
@@ -51,8 +51,8 @@ export const SearchResultsPage = ({
     const { toast } = useToast();

     // Encodes the number of matches to return in the search response.
-    const _maxMatchCount = parseInt(useNonEmptyQueryParam(SearchQueryParams.matches) ?? `${DEFAULT_MAX_MATCH_COUNT}`);
-    const maxMatchCount = isNaN(_maxMatchCount) ? DEFAULT_MAX_MATCH_COUNT : _maxMatchCount;
+    const _maxMatchCount = parseInt(useNonEmptyQueryParam(SearchQueryParams.matches) ?? `${defaultMaxMatchCount}`);
+    const maxMatchCount = isNaN(_maxMatchCount) ? defaultMaxMatchCount : _maxMatchCount;

     const {
         data: searchResponse,
@@ -66,7 +66,7 @@ export const SearchResultsPage = ({
             matches: maxMatchCount,
             contextLines: 3,
             whole: false,
-        }, domain)), "client.search"),
+        })), "client.search"),
         select: ({ data, durationMs }) => ({
             ...data,
             totalClientSearchDurationMs: durationMs,
@@ -1,3 +1,4 @@
+import { env } from "@sourcebot/shared";
 import { SearchLandingPage } from "./components/searchLandingPage";
 import { SearchResultsPage } from "./components/searchResultsPage";

@@ -18,6 +19,7 @@ export default async function SearchPage(props: SearchPageProps) {
     return (
         <SearchResultsPage
             searchQuery={query}
+            defaultMaxMatchCount={env.DEFAULT_MAX_MATCH_COUNT}
         />
     )
 }
@ -4,13 +4,13 @@ import { DisplayDate } from "@/app/[domain]/components/DisplayDate";
|
||||||
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
|
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
|
||||||
import { Skeleton } from "@/components/ui/skeleton";
|
import { Skeleton } from "@/components/ui/skeleton";
|
||||||
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
|
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
|
||||||
import { env } from "@sourcebot/shared";
|
|
||||||
import { SINGLE_TENANT_ORG_DOMAIN } from "@/lib/constants";
|
import { SINGLE_TENANT_ORG_DOMAIN } from "@/lib/constants";
|
||||||
import { notFound, ServiceErrorException } from "@/lib/serviceError";
|
import { notFound as notFoundServiceError, ServiceErrorException } from "@/lib/serviceError";
|
||||||
|
import { notFound } from "next/navigation";
|
||||||
import { isServiceError } from "@/lib/utils";
|
import { isServiceError } from "@/lib/utils";
|
||||||
import { withAuthV2 } from "@/withAuthV2";
|
import { withAuthV2 } from "@/withAuthV2";
|
||||||
import { AzureDevOpsConnectionConfig, BitbucketConnectionConfig, GenericGitHostConnectionConfig, GerritConnectionConfig, GiteaConnectionConfig, GithubConnectionConfig, GitlabConnectionConfig } from "@sourcebot/schemas/v3/index.type";
|
import { AzureDevOpsConnectionConfig, BitbucketConnectionConfig, GenericGitHostConnectionConfig, GerritConnectionConfig, GiteaConnectionConfig, GithubConnectionConfig, GitlabConnectionConfig } from "@sourcebot/schemas/v3/index.type";
|
||||||
import { getConfigSettings } from "@sourcebot/shared";
|
import { env, getConfigSettings } from "@sourcebot/shared";
|
||||||
import { Info } from "lucide-react";
|
import { Info } from "lucide-react";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
import { Suspense } from "react";
|
import { Suspense } from "react";
|
||||||
|
|
@ -22,12 +22,16 @@ interface ConnectionDetailPageProps {
|
||||||
}>
|
}>
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
export default async function ConnectionDetailPage(props: ConnectionDetailPageProps) {
|
export default async function ConnectionDetailPage(props: ConnectionDetailPageProps) {
|
||||||
const params = await props.params;
|
const params = await props.params;
|
||||||
const { id } = params;
|
const { id } = params;
|
||||||
|
|
||||||
const connection = await getConnectionWithJobs(Number.parseInt(id));
|
const connectionId = Number.parseInt(id);
|
||||||
|
if (isNaN(connectionId)) {
|
||||||
|
return notFound();
|
||||||
|
}
|
||||||
|
|
||||||
|
const connection = await getConnectionWithJobs(connectionId);
|
||||||
if (isServiceError(connection)) {
|
if (isServiceError(connection)) {
|
||||||
throw new ServiceErrorException(connection);
|
throw new ServiceErrorException(connection);
|
||||||
}
|
}
|
||||||
|
|
@ -172,7 +176,10 @@ export default async function ConnectionDetailPage(props: ConnectionDetailPagePr
|
||||||
</CardHeader>
|
</CardHeader>
|
||||||
<CardContent>
|
<CardContent>
|
||||||
<Suspense fallback={<Skeleton className="h-96 w-full" />}>
|
<Suspense fallback={<Skeleton className="h-96 w-full" />}>
|
||||||
<ConnectionJobsTable data={connection.syncJobs} />
|
<ConnectionJobsTable
|
||||||
|
data={connection.syncJobs}
|
||||||
|
connectionId={connectionId}
|
||||||
|
/>
|
||||||
</Suspense>
|
</Suspense>
|
||||||
</CardContent>
|
</CardContent>
|
||||||
</Card>
|
</Card>
|
||||||
|
|
@ -197,7 +204,7 @@ const getConnectionWithJobs = async (id: number) => sew(() =>
|
||||||
});
|
});
|
||||||
|
|
||||||
if (!connection) {
|
if (!connection) {
|
||||||
return notFound();
|
return notFoundServiceError();
|
||||||
}
|
}
|
||||||
|
|
||||||
return connection;
|
return connection;
|
||||||
|
|
|
||||||
|
|
@ -18,7 +18,7 @@ import {
|
||||||
useReactTable,
|
useReactTable,
|
||||||
} from "@tanstack/react-table"
|
} from "@tanstack/react-table"
|
||||||
import { cva } from "class-variance-authority"
|
import { cva } from "class-variance-authority"
|
||||||
import { AlertCircle, AlertTriangle, ArrowUpDown, RefreshCwIcon } from "lucide-react"
|
import { AlertCircle, AlertTriangle, ArrowUpDown, PlusCircleIcon, RefreshCwIcon } from "lucide-react"
|
||||||
import * as React from "react"
|
import * as React from "react"
|
||||||
import { CopyIconButton } from "@/app/[domain]/components/copyIconButton"
|
import { CopyIconButton } from "@/app/[domain]/components/copyIconButton"
|
||||||
import { useMemo } from "react"
|
import { useMemo } from "react"
|
||||||
|
|
@ -26,6 +26,9 @@ import { LightweightCodeHighlighter } from "@/app/[domain]/components/lightweigh
|
||||||
import { useRouter } from "next/navigation"
|
import { useRouter } from "next/navigation"
|
||||||
import { useToast } from "@/components/hooks/use-toast"
|
import { useToast } from "@/components/hooks/use-toast"
|
||||||
import { DisplayDate } from "@/app/[domain]/components/DisplayDate"
|
import { DisplayDate } from "@/app/[domain]/components/DisplayDate"
|
||||||
|
import { LoadingButton } from "@/components/ui/loading-button"
|
||||||
|
import { syncConnection } from "@/features/workerApi/actions"
|
||||||
|
import { isServiceError } from "@/lib/utils"
|
||||||
|
|
||||||
|
|
||||||
export type ConnectionSyncJob = {
|
export type ConnectionSyncJob = {
|
||||||
|
|
@ -181,13 +184,33 @@ export const columns: ColumnDef<ConnectionSyncJob>[] = [
|
||||||
},
|
},
|
||||||
]
|
]
|
||||||
|
|
||||||
export const ConnectionJobsTable = ({ data }: { data: ConnectionSyncJob[] }) => {
|
export const ConnectionJobsTable = ({ data, connectionId }: { data: ConnectionSyncJob[], connectionId: number }) => {
|
||||||
const [sorting, setSorting] = React.useState<SortingState>([{ id: "createdAt", desc: true }])
|
const [sorting, setSorting] = React.useState<SortingState>([{ id: "createdAt", desc: true }])
|
||||||
const [columnFilters, setColumnFilters] = React.useState<ColumnFiltersState>([])
|
const [columnFilters, setColumnFilters] = React.useState<ColumnFiltersState>([])
|
||||||
const [columnVisibility, setColumnVisibility] = React.useState<VisibilityState>({})
|
const [columnVisibility, setColumnVisibility] = React.useState<VisibilityState>({})
|
||||||
const router = useRouter();
|
const router = useRouter();
|
||||||
const { toast } = useToast();
|
const { toast } = useToast();
|
||||||
|
|
||||||
|
const [isSyncSubmitting, setIsSyncSubmitting] = React.useState(false);
|
||||||
|
const onSyncButtonClick = React.useCallback(async () => {
|
||||||
|
setIsSyncSubmitting(true);
|
||||||
|
const response = await syncConnection(connectionId);
|
||||||
|
|
||||||
|
if (!isServiceError(response)) {
|
||||||
|
const { jobId } = response;
|
||||||
|
toast({
|
||||||
|
description: `✅ Connection synced successfully. Job ID: ${jobId}`,
|
||||||
|
})
|
||||||
|
router.refresh();
|
||||||
|
} else {
|
||||||
|
toast({
|
||||||
|
description: `❌ Failed to sync connection. ${response.message}`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
setIsSyncSubmitting(false);
|
||||||
|
}, [connectionId, router, toast]);
|
||||||
|
|
||||||
const table = useReactTable({
|
const table = useReactTable({
|
||||||
data,
|
data,
|
||||||
columns,
|
columns,
|
||||||
|
|
@ -238,19 +261,29 @@ export const ConnectionJobsTable = ({ data }: { data: ConnectionSyncJob[] }) =>
|
||||||
</SelectContent>
|
</SelectContent>
|
||||||
</Select>
|
</Select>
|
||||||
|
|
||||||
<Button
|
<div className="ml-auto flex items-center gap-2">
|
||||||
variant="outline"
|
<Button
|
||||||
className="ml-auto"
|
variant="outline"
|
||||||
onClick={() => {
|
onClick={() => {
|
||||||
router.refresh();
|
router.refresh();
|
||||||
toast({
|
toast({
|
||||||
description: "Page refreshed",
|
description: "Page refreshed",
|
||||||
});
|
});
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
<RefreshCwIcon className="w-3 h-3" />
|
<RefreshCwIcon className="w-3 h-3" />
|
||||||
Refresh
|
Refresh
|
||||||
</Button>
|
</Button>
|
||||||
|
|
||||||
|
<LoadingButton
|
||||||
|
onClick={onSyncButtonClick}
|
||||||
|
loading={isSyncSubmitting}
|
||||||
|
variant="outline"
|
||||||
|
>
|
||||||
|
<PlusCircleIcon className="w-3 h-3" />
|
||||||
|
Trigger sync
|
||||||
|
</LoadingButton>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div className="rounded-md border">
|
<div className="rounded-md border">
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,5 @@
|
||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { getVersionResponseSchema, getReposResponseSchema } from "@/lib/schemas";
|
|
||||||
import { ServiceError } from "@/lib/serviceError";
|
import { ServiceError } from "@/lib/serviceError";
|
||||||
import { GetVersionResponse, GetReposResponse } from "@/lib/types";
|
import { GetVersionResponse, GetReposResponse } from "@/lib/types";
|
||||||
import { isServiceError } from "@/lib/utils";
|
import { isServiceError } from "@/lib/utils";
|
||||||
|
|
@ -11,16 +10,21 @@ import {
|
||||||
SearchResponse,
|
SearchResponse,
|
||||||
} from "@/features/search/types";
|
} from "@/features/search/types";
|
||||||
import {
|
import {
|
||||||
fileSourceResponseSchema,
|
FindRelatedSymbolsRequest,
|
||||||
searchResponseSchema,
|
FindRelatedSymbolsResponse,
|
||||||
} from "@/features/search/schemas";
|
} from "@/features/codeNav/types";
|
||||||
|
import {
|
||||||
|
GetFilesRequest,
|
||||||
|
GetFilesResponse,
|
||||||
|
GetTreeRequest,
|
||||||
|
GetTreeResponse,
|
||||||
|
} from "@/features/fileTree/types";
|
||||||
|
|
||||||
export const search = async (body: SearchRequest, domain: string): Promise<SearchResponse | ServiceError> => {
|
export const search = async (body: SearchRequest): Promise<SearchResponse | ServiceError> => {
|
||||||
const result = await fetch("/api/search", {
|
const result = await fetch("/api/search", {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"X-Org-Domain": domain,
|
|
||||||
},
|
},
|
||||||
body: JSON.stringify(body),
|
body: JSON.stringify(body),
|
||||||
}).then(response => response.json());
|
}).then(response => response.json());
|
||||||
|
|
@ -29,20 +33,19 @@ export const search = async (body: SearchRequest, domain: string): Promise<Searc
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
return searchResponseSchema.parse(result);
|
return result as SearchResponse | ServiceError;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const fetchFileSource = async (body: FileSourceRequest, domain: string): Promise<FileSourceResponse> => {
|
export const getFileSource = async (body: FileSourceRequest): Promise<FileSourceResponse | ServiceError> => {
|
||||||
const result = await fetch("/api/source", {
|
const result = await fetch("/api/source", {
|
||||||
method: "POST",
|
method: "POST",
|
||||||
headers: {
|
headers: {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"X-Org-Domain": domain,
|
|
||||||
},
|
},
|
||||||
body: JSON.stringify(body),
|
body: JSON.stringify(body),
|
||||||
}).then(response => response.json());
|
}).then(response => response.json());
|
||||||
|
|
||||||
return fileSourceResponseSchema.parse(result);
|
return result as FileSourceResponse | ServiceError;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const getRepos = async (): Promise<GetReposResponse> => {
|
export const getRepos = async (): Promise<GetReposResponse> => {
|
||||||
|
|
@ -53,7 +56,7 @@ export const getRepos = async (): Promise<GetReposResponse> => {
|
||||||
},
|
},
|
||||||
}).then(response => response.json());
|
}).then(response => response.json());
|
||||||
|
|
||||||
return getReposResponseSchema.parse(result);
|
return result as GetReposResponse | ServiceError;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const getVersion = async (): Promise<GetVersionResponse> => {
|
export const getVersion = async (): Promise<GetVersionResponse> => {
|
||||||
|
|
@ -63,5 +66,37 @@ export const getVersion = async (): Promise<GetVersionResponse> => {
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
},
|
},
|
||||||
}).then(response => response.json());
|
}).then(response => response.json());
|
||||||
return getVersionResponseSchema.parse(result);
|
return result as GetVersionResponse;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export const findSearchBasedSymbolReferences = async (body: FindRelatedSymbolsRequest): Promise<FindRelatedSymbolsResponse | ServiceError> => {
|
||||||
|
const result = await fetch("/api/find_references", {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
}).then(response => response.json());
|
||||||
|
return result as FindRelatedSymbolsResponse | ServiceError;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const findSearchBasedSymbolDefinitions = async (body: FindRelatedSymbolsRequest): Promise<FindRelatedSymbolsResponse | ServiceError> => {
|
||||||
|
const result = await fetch("/api/find_definitions", {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
}).then(response => response.json());
|
||||||
|
return result as FindRelatedSymbolsResponse | ServiceError;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getTree = async (body: GetTreeRequest): Promise<GetTreeResponse | ServiceError> => {
|
||||||
|
const result = await fetch("/api/tree", {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
}).then(response => response.json());
|
||||||
|
return result as GetTreeResponse | ServiceError;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const getFiles = async (body: GetFilesRequest): Promise<GetFilesResponse | ServiceError> => {
|
||||||
|
const result = await fetch("/api/files", {
|
||||||
|
method: "POST",
|
||||||
|
body: JSON.stringify(body),
|
||||||
|
}).then(response => response.json());
|
||||||
|
return result as GetFilesResponse | ServiceError;
|
||||||
|
}
|
||||||
23
packages/web/src/app/api/(server)/files/route.ts
Normal file
23
packages/web/src/app/api/(server)/files/route.ts
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
'use server';
|
||||||
|
|
||||||
|
import { getFiles } from "@/features/fileTree/api";
|
||||||
|
import { getFilesRequestSchema } from "@/features/fileTree/types";
|
||||||
|
import { schemaValidationError, serviceErrorResponse } from "@/lib/serviceError";
|
||||||
|
import { isServiceError } from "@/lib/utils";
|
||||||
|
import { NextRequest } from "next/server";
|
||||||
|
|
||||||
|
export const POST = async (request: NextRequest) => {
|
||||||
|
const body = await request.json();
|
||||||
|
const parsed = await getFilesRequestSchema.safeParseAsync(body);
|
||||||
|
if (!parsed.success) {
|
||||||
|
return serviceErrorResponse(schemaValidationError(parsed.error));
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await getFiles(parsed.data);
|
||||||
|
if (isServiceError(response)) {
|
||||||
|
return serviceErrorResponse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json(response);
|
||||||
|
}
|
||||||
|
|
||||||
22
packages/web/src/app/api/(server)/find_definitions/route.ts
Normal file
22
packages/web/src/app/api/(server)/find_definitions/route.ts
Normal file
|
|
@ -0,0 +1,22 @@
|
||||||
|
'use server';
|
||||||
|
|
||||||
|
import { findSearchBasedSymbolDefinitions } from "@/features/codeNav/api";
|
||||||
|
import { findRelatedSymbolsRequestSchema } from "@/features/codeNav/types";
|
||||||
|
import { schemaValidationError, serviceErrorResponse } from "@/lib/serviceError";
|
||||||
|
import { isServiceError } from "@/lib/utils";
|
||||||
|
import { NextRequest } from "next/server";
|
||||||
|
|
||||||
|
export const POST = async (request: NextRequest) => {
|
||||||
|
const body = await request.json();
|
||||||
|
const parsed = await findRelatedSymbolsRequestSchema.safeParseAsync(body);
|
||||||
|
if (!parsed.success) {
|
||||||
|
return serviceErrorResponse(schemaValidationError(parsed.error));
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await findSearchBasedSymbolDefinitions(parsed.data);
|
||||||
|
if (isServiceError(response)) {
|
||||||
|
return serviceErrorResponse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json(response);
|
||||||
|
}
|
||||||
20
packages/web/src/app/api/(server)/find_references/route.ts
Normal file
20
packages/web/src/app/api/(server)/find_references/route.ts
Normal file
|
|
@ -0,0 +1,20 @@
|
||||||
|
import { findSearchBasedSymbolReferences } from "@/features/codeNav/api";
|
||||||
|
import { findRelatedSymbolsRequestSchema } from "@/features/codeNav/types";
|
||||||
|
import { schemaValidationError, serviceErrorResponse } from "@/lib/serviceError";
|
||||||
|
import { isServiceError } from "@/lib/utils";
|
||||||
|
import { NextRequest } from "next/server";
|
||||||
|
|
||||||
|
export const POST = async (request: NextRequest) => {
|
||||||
|
const body = await request.json();
|
||||||
|
const parsed = await findRelatedSymbolsRequestSchema.safeParseAsync(body);
|
||||||
|
if (!parsed.success) {
|
||||||
|
return serviceErrorResponse(schemaValidationError(parsed.error));
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await findSearchBasedSymbolReferences(parsed.data);
|
||||||
|
if (isServiceError(response)) {
|
||||||
|
return serviceErrorResponse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json(response);
|
||||||
|
}
|
||||||
23
packages/web/src/app/api/(server)/tree/route.ts
Normal file
23
packages/web/src/app/api/(server)/tree/route.ts
Normal file
|
|
@ -0,0 +1,23 @@
|
||||||
|
'use server';
|
||||||
|
|
||||||
|
import { getTree } from "@/features/fileTree/api";
|
||||||
|
import { getTreeRequestSchema } from "@/features/fileTree/types";
|
||||||
|
import { schemaValidationError, serviceErrorResponse } from "@/lib/serviceError";
|
||||||
|
import { isServiceError } from "@/lib/utils";
|
||||||
|
import { NextRequest } from "next/server";
|
||||||
|
|
||||||
|
export const POST = async (request: NextRequest) => {
|
||||||
|
const body = await request.json();
|
||||||
|
const parsed = await getTreeRequestSchema.safeParseAsync(body);
|
||||||
|
if (!parsed.success) {
|
||||||
|
return serviceErrorResponse(schemaValidationError(parsed.error));
|
||||||
|
}
|
||||||
|
|
||||||
|
const response = await getTree(parsed.data);
|
||||||
|
if (isServiceError(response)) {
|
||||||
|
return serviceErrorResponse(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json(response);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
@ -1,11 +1,11 @@
|
||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { useBrowseState } from "@/app/[domain]/browse/hooks/useBrowseState";
|
import { useBrowseState } from "@/app/[domain]/browse/hooks/useBrowseState";
|
||||||
|
import { findSearchBasedSymbolReferences, findSearchBasedSymbolDefinitions} from "@/app/api/(client)/client";
|
||||||
import { AnimatedResizableHandle } from "@/components/ui/animatedResizableHandle";
|
import { AnimatedResizableHandle } from "@/components/ui/animatedResizableHandle";
|
||||||
import { Badge } from "@/components/ui/badge";
|
import { Badge } from "@/components/ui/badge";
|
||||||
import { ResizablePanel, ResizablePanelGroup } from "@/components/ui/resizable";
|
import { ResizablePanel, ResizablePanelGroup } from "@/components/ui/resizable";
|
||||||
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
|
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
|
||||||
import { findSearchBasedSymbolDefinitions, findSearchBasedSymbolReferences } from "@/features/codeNav/actions";
|
|
||||||
import { useDomain } from "@/hooks/useDomain";
|
import { useDomain } from "@/hooks/useDomain";
|
||||||
import { unwrapServiceError } from "@/lib/utils";
|
import { unwrapServiceError } from "@/lib/utils";
|
||||||
import { useQuery } from "@tanstack/react-query";
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
|
@ -46,7 +46,7 @@ export const ExploreMenu = ({
|
||||||
symbolName: selectedSymbolInfo.symbolName,
|
symbolName: selectedSymbolInfo.symbolName,
|
||||||
language: selectedSymbolInfo.language,
|
language: selectedSymbolInfo.language,
|
||||||
revisionName: selectedSymbolInfo.revisionName,
|
revisionName: selectedSymbolInfo.revisionName,
|
||||||
}, domain)
|
})
|
||||||
),
|
),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
@ -62,7 +62,7 @@ export const ExploreMenu = ({
|
||||||
symbolName: selectedSymbolInfo.symbolName,
|
symbolName: selectedSymbolInfo.symbolName,
|
||||||
language: selectedSymbolInfo.language,
|
language: selectedSymbolInfo.language,
|
||||||
revisionName: selectedSymbolInfo.revisionName,
|
revisionName: selectedSymbolInfo.revisionName,
|
||||||
}, domain)
|
})
|
||||||
),
|
),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
import { findSearchBasedSymbolDefinitions } from "@/features/codeNav/actions";
|
import { findSearchBasedSymbolDefinitions } from "@/app/api/(client)/client";
|
||||||
import { SourceRange } from "@/features/search/types";
|
import { SourceRange } from "@/features/search/types";
|
||||||
import { useDomain } from "@/hooks/useDomain";
|
import { useDomain } from "@/hooks/useDomain";
|
||||||
import { unwrapServiceError } from "@/lib/utils";
|
import { unwrapServiceError } from "@/lib/utils";
|
||||||
|
|
@ -56,7 +56,7 @@ export const useHoveredOverSymbolInfo = ({
|
||||||
symbolName: symbolName!,
|
symbolName: symbolName!,
|
||||||
language,
|
language,
|
||||||
revisionName,
|
revisionName,
|
||||||
}, domain)
|
})
|
||||||
),
|
),
|
||||||
select: ((data) => {
|
select: ((data) => {
|
||||||
return data.files.flatMap((file) => {
|
return data.files.flatMap((file) => {
|
||||||
|
|
|
||||||
|
|
@ -251,7 +251,6 @@ const resolveFileSource = async ({ path, repo, revision }: FileSource) => {
|
||||||
fileName: path,
|
fileName: path,
|
||||||
repository: repo,
|
repository: repo,
|
||||||
branch: revision,
|
branch: revision,
|
||||||
// @todo: handle multi-tenancy.
|
|
||||||
});
|
});
|
||||||
|
|
||||||
if (isServiceError(fileSource)) {
|
if (isServiceError(fileSource)) {
|
||||||
|
|
|
||||||
|
|
@ -41,7 +41,7 @@ export const useSuggestionsData = ({
|
||||||
query,
|
query,
|
||||||
matches: 10,
|
matches: 10,
|
||||||
contextLines: 1,
|
contextLines: 1,
|
||||||
}, domain))
|
}))
|
||||||
},
|
},
|
||||||
select: (data): FileSuggestion[] => {
|
select: (data): FileSuggestion[] => {
|
||||||
return data.files.map((file) => {
|
return data.files.map((file) => {
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { fetchFileSource } from "@/app/api/(client)/client";
|
import { getFileSource } from "@/app/api/(client)/client";
|
||||||
import { VscodeFileIcon } from "@/app/components/vscodeFileIcon";
|
import { VscodeFileIcon } from "@/app/components/vscodeFileIcon";
|
||||||
import { ScrollArea } from "@/components/ui/scroll-area";
|
import { ScrollArea } from "@/components/ui/scroll-area";
|
||||||
import { Skeleton } from "@/components/ui/skeleton";
|
import { Skeleton } from "@/components/ui/skeleton";
|
||||||
|
|
@ -99,11 +99,11 @@ export const ReferencedSourcesListView = ({
|
||||||
const fileSourceQueries = useQueries({
|
const fileSourceQueries = useQueries({
|
||||||
queries: referencedFileSources.map((file) => ({
|
queries: referencedFileSources.map((file) => ({
|
||||||
queryKey: ['fileSource', file.path, file.repo, file.revision, domain],
|
queryKey: ['fileSource', file.path, file.repo, file.revision, domain],
|
||||||
queryFn: () => unwrapServiceError(fetchFileSource({
|
queryFn: () => unwrapServiceError(getFileSource({
|
||||||
fileName: file.path,
|
fileName: file.path,
|
||||||
repository: file.repo,
|
repository: file.repo,
|
||||||
branch: file.revision,
|
branch: file.revision,
|
||||||
}, domain)),
|
})),
|
||||||
staleTime: Infinity,
|
staleTime: Infinity,
|
||||||
})),
|
})),
|
||||||
});
|
});
|
||||||
|
|
|
||||||
|
|
@ -1,10 +1,9 @@
|
||||||
import { z } from "zod"
|
import { z } from "zod"
|
||||||
import { search } from "@/features/search/searchApi"
|
import { search } from "@/features/search/searchApi"
|
||||||
import { SINGLE_TENANT_ORG_DOMAIN } from "@/lib/constants"
|
|
||||||
import { InferToolInput, InferToolOutput, InferUITool, tool, ToolUIPart } from "ai";
|
import { InferToolInput, InferToolOutput, InferUITool, tool, ToolUIPart } from "ai";
|
||||||
import { isServiceError } from "@/lib/utils";
|
import { isServiceError } from "@/lib/utils";
|
||||||
import { getFileSource } from "../search/fileSourceApi";
|
import { getFileSource } from "../search/fileSourceApi";
|
||||||
import { findSearchBasedSymbolDefinitions, findSearchBasedSymbolReferences } from "../codeNav/actions";
|
import { findSearchBasedSymbolDefinitions, findSearchBasedSymbolReferences } from "../codeNav/api";
|
||||||
import { FileSourceResponse } from "../search/types";
|
import { FileSourceResponse } from "../search/types";
|
||||||
import { addLineNumbers, buildSearchQuery } from "./utils";
|
import { addLineNumbers, buildSearchQuery } from "./utils";
|
||||||
import { toolNames } from "./constants";
|
import { toolNames } from "./constants";
|
||||||
|
|
@ -36,8 +35,7 @@ export const findSymbolReferencesTool = tool({
|
||||||
symbolName: symbol,
|
symbolName: symbol,
|
||||||
language,
|
language,
|
||||||
revisionName: "HEAD",
|
revisionName: "HEAD",
|
||||||
// @todo(mt): handle multi-tenancy.
|
});
|
||||||
}, SINGLE_TENANT_ORG_DOMAIN);
|
|
||||||
|
|
||||||
if (isServiceError(response)) {
|
if (isServiceError(response)) {
|
||||||
return response;
|
return response;
|
||||||
|
|
@ -74,8 +72,7 @@ export const findSymbolDefinitionsTool = tool({
|
||||||
symbolName: symbol,
|
symbolName: symbol,
|
||||||
language,
|
language,
|
||||||
revisionName: revision,
|
revisionName: revision,
|
||||||
// @todo(mt): handle multi-tenancy.
|
});
|
||||||
}, SINGLE_TENANT_ORG_DOMAIN);
|
|
||||||
|
|
||||||
if (isServiceError(response)) {
|
if (isServiceError(response)) {
|
||||||
return response;
|
return response;
|
||||||
|
|
|
||||||
|
|
@ -1,60 +1,43 @@
|
||||||
'use server';
|
import 'server-only';
|
||||||
|
|
||||||
import { sew, withAuth, withOrgMembership } from "@/actions";
|
import { sew } from "@/actions";
|
||||||
import { searchResponseSchema } from "@/features/search/schemas";
|
import { searchResponseSchema } from "@/features/search/schemas";
|
||||||
import { search } from "@/features/search/searchApi";
|
import { search } from "@/features/search/searchApi";
|
||||||
import { isServiceError } from "@/lib/utils";
|
|
||||||
import { FindRelatedSymbolsResponse } from "./types";
|
|
||||||
import { ServiceError } from "@/lib/serviceError";
|
import { ServiceError } from "@/lib/serviceError";
|
||||||
|
import { isServiceError } from "@/lib/utils";
|
||||||
|
import { withOptionalAuthV2 } from "@/withAuthV2";
|
||||||
import { SearchResponse } from "../search/types";
|
import { SearchResponse } from "../search/types";
|
||||||
import { OrgRole } from "@sourcebot/db";
|
import { FindRelatedSymbolsRequest, FindRelatedSymbolsResponse } from "./types";
|
||||||
|
|
||||||
// The maximum number of matches to return from the search API.
|
// The maximum number of matches to return from the search API.
|
||||||
const MAX_REFERENCE_COUNT = 1000;
|
const MAX_REFERENCE_COUNT = 1000;
|
||||||
|
|
||||||
export const findSearchBasedSymbolReferences = async (
|
export const findSearchBasedSymbolReferences = async (props: FindRelatedSymbolsRequest): Promise<FindRelatedSymbolsResponse | ServiceError> => sew(() =>
|
||||||
props: {
|
withOptionalAuthV2(async () => {
|
||||||
symbolName: string,
|
const {
|
||||||
language: string,
|
symbolName,
|
||||||
revisionName?: string,
|
language,
|
||||||
},
|
revisionName = "HEAD",
|
||||||
domain: string,
|
} = props;
|
||||||
): Promise<FindRelatedSymbolsResponse | ServiceError> => sew(() =>
|
|
||||||
withAuth((session) =>
|
|
||||||
withOrgMembership(session, domain, async () => {
|
|
||||||
const {
|
|
||||||
symbolName,
|
|
||||||
language,
|
|
||||||
revisionName = "HEAD",
|
|
||||||
} = props;
|
|
||||||
|
|
||||||
const query = `\\b${symbolName}\\b rev:${revisionName} ${getExpandedLanguageFilter(language)} case:yes`;
|
const query = `\\b${symbolName}\\b rev:${revisionName} ${getExpandedLanguageFilter(language)} case:yes`;
|
||||||
|
|
||||||
const searchResult = await search({
|
const searchResult = await search({
|
||||||
query,
|
query,
|
||||||
matches: MAX_REFERENCE_COUNT,
|
matches: MAX_REFERENCE_COUNT,
|
||||||
contextLines: 0,
|
contextLines: 0,
|
||||||
});
|
});
|
||||||
|
|
||||||
if (isServiceError(searchResult)) {
|
if (isServiceError(searchResult)) {
|
||||||
return searchResult;
|
return searchResult;
|
||||||
}
|
}
|
||||||
|
|
||||||
return parseRelatedSymbolsSearchResponse(searchResult);
|
return parseRelatedSymbolsSearchResponse(searchResult);
|
||||||
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true)
|
}));
|
||||||
);
|
|
||||||
|
|
||||||
|
|
||||||
export const findSearchBasedSymbolDefinitions = async (
|
export const findSearchBasedSymbolDefinitions = async (props: FindRelatedSymbolsRequest): Promise<FindRelatedSymbolsResponse | ServiceError> => sew(() =>
|
||||||
props: {
|
withOptionalAuthV2(async () => {
|
||||||
symbolName: string,
|
|
||||||
language: string,
|
|
||||||
revisionName?: string,
|
|
||||||
},
|
|
||||||
domain: string,
|
|
||||||
): Promise<FindRelatedSymbolsResponse | ServiceError> => sew(() =>
|
|
||||||
withAuth((session) =>
|
|
||||||
withOrgMembership(session, domain, async () => {
|
|
||||||
const {
|
const {
|
||||||
symbolName,
|
symbolName,
|
||||||
language,
|
language,
|
||||||
|
|
@ -74,8 +57,7 @@ export const findSearchBasedSymbolDefinitions = async (
|
||||||
}
|
}
|
||||||
|
|
||||||
return parseRelatedSymbolsSearchResponse(searchResult);
|
return parseRelatedSymbolsSearchResponse(searchResult);
|
||||||
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true)
|
}));
|
||||||
);
|
|
||||||
|
|
||||||
const parseRelatedSymbolsSearchResponse = (searchResult: SearchResponse) => {
|
const parseRelatedSymbolsSearchResponse = (searchResult: SearchResponse) => {
|
||||||
const parser = searchResponseSchema.transform(async ({ files }) => ({
|
const parser = searchResponseSchema.transform(async ({ files }) => ({
|
||||||
|
|
@ -1,20 +0,0 @@
|
||||||
import { rangeSchema, repositoryInfoSchema } from "../search/schemas";
|
|
||||||
import { z } from "zod";
|
|
||||||
|
|
||||||
export const findRelatedSymbolsResponseSchema = z.object({
|
|
||||||
stats: z.object({
|
|
||||||
matchCount: z.number(),
|
|
||||||
}),
|
|
||||||
files: z.array(z.object({
|
|
||||||
fileName: z.string(),
|
|
||||||
repository: z.string(),
|
|
||||||
repositoryId: z.number(),
|
|
||||||
webUrl: z.string().optional(),
|
|
||||||
language: z.string(),
|
|
||||||
matches: z.array(z.object({
|
|
||||||
lineContent: z.string(),
|
|
||||||
range: rangeSchema,
|
|
||||||
}))
|
|
||||||
})),
|
|
||||||
repositoryInfo: z.array(repositoryInfoSchema),
|
|
||||||
});
|
|
||||||
|
|
@ -1,4 +1,29 @@
|
||||||
import { z } from "zod";
|
import { z } from "zod";
|
||||||
import { findRelatedSymbolsResponseSchema } from "./schemas";
|
import { rangeSchema, repositoryInfoSchema } from "../search/schemas";
|
||||||
|
|
||||||
|
export const findRelatedSymbolsRequestSchema = z.object({
|
||||||
|
symbolName: z.string(),
|
||||||
|
language: z.string(),
|
||||||
|
revisionName: z.string().optional(),
|
||||||
|
});
|
||||||
|
export type FindRelatedSymbolsRequest = z.infer<typeof findRelatedSymbolsRequestSchema>;
|
||||||
|
|
||||||
|
export const findRelatedSymbolsResponseSchema = z.object({
|
||||||
|
stats: z.object({
|
||||||
|
matchCount: z.number(),
|
||||||
|
}),
|
||||||
|
files: z.array(z.object({
|
||||||
|
fileName: z.string(),
|
||||||
|
repository: z.string(),
|
||||||
|
repositoryId: z.number(),
|
||||||
|
webUrl: z.string().optional(),
|
||||||
|
language: z.string(),
|
||||||
|
matches: z.array(z.object({
|
||||||
|
lineContent: z.string(),
|
||||||
|
range: rangeSchema,
|
||||||
|
}))
|
||||||
|
})),
|
||||||
|
repositoryInfo: z.array(repositoryInfoSchema),
|
||||||
|
});
|
||||||
|
|
||||||
export type FindRelatedSymbolsResponse = z.infer<typeof findRelatedSymbolsResponseSchema>;
|
export type FindRelatedSymbolsResponse = z.infer<typeof findRelatedSymbolsResponseSchema>;
|
||||||
|
|
|
||||||
|
|
@ -1,4 +1,4 @@
|
||||||
'use server';
|
import 'server-only';
|
||||||
|
|
||||||
import { sew } from '@/actions';
|
import { sew } from '@/actions';
|
||||||
import { env } from '@sourcebot/shared';
|
import { env } from '@sourcebot/shared';
|
||||||
|
|
@ -8,19 +8,10 @@ import { Repo } from '@sourcebot/db';
|
||||||
import { createLogger } from '@sourcebot/shared';
|
import { createLogger } from '@sourcebot/shared';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { simpleGit } from 'simple-git';
|
import { simpleGit } from 'simple-git';
|
||||||
|
import { FileTreeItem, FileTreeNode } from './types';
|
||||||
|
|
||||||
const logger = createLogger('file-tree');
|
const logger = createLogger('file-tree');
|
||||||
|
|
||||||
export type FileTreeItem = {
|
|
||||||
type: string;
|
|
||||||
path: string;
|
|
||||||
name: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export type FileTreeNode = FileTreeItem & {
|
|
||||||
children: FileTreeNode[];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Returns the tree of files (blobs) and directories (trees) for a given repository,
|
* Returns the tree of files (blobs) and directories (trees) for a given repository,
|
||||||
* at a given revision.
|
* at a given revision.
|
||||||
|
|
@ -218,7 +209,7 @@ const buildFileTree = (flatList: { type: string, path: string }[]): FileTreeNode
|
||||||
const part = parts[i];
|
const part = parts[i];
|
||||||
const isLeaf = i === parts.length - 1;
|
const isLeaf = i === parts.length - 1;
|
||||||
const nodeType = isLeaf ? item.type : 'tree';
|
const nodeType = isLeaf ? item.type : 'tree';
|
||||||
let next = current.children.find(child => child.name === part && child.type === nodeType);
|
let next = current.children.find((child: FileTreeNode) => child.name === part && child.type === nodeType);
|
||||||
|
|
||||||
if (!next) {
|
if (!next) {
|
||||||
next = {
|
next = {
|
||||||
|
|
@ -240,7 +231,7 @@ const buildFileTree = (flatList: { type: string, path: string }[]): FileTreeNode
|
||||||
|
|
||||||
const sortedChildren = node.children
|
const sortedChildren = node.children
|
||||||
.map(sortTree)
|
.map(sortTree)
|
||||||
.sort((a, b) => {
|
.sort((a: FileTreeNode, b: FileTreeNode) => {
|
||||||
if (a.type !== b.type) {
|
if (a.type !== b.type) {
|
||||||
return a.type === 'tree' ? -1 : 1;
|
return a.type === 'tree' ? -1 : 1;
|
||||||
}
|
}
|
||||||
|
|
@ -1,12 +1,12 @@
|
||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { FileTreeItem } from "../actions";
|
|
||||||
import { useEffect, useRef } from "react";
|
import { useEffect, useRef } from "react";
|
||||||
import clsx from "clsx";
|
import clsx from "clsx";
|
||||||
import scrollIntoView from 'scroll-into-view-if-needed';
|
import scrollIntoView from 'scroll-into-view-if-needed';
|
||||||
import { ChevronDownIcon, ChevronRightIcon } from "@radix-ui/react-icons";
|
import { ChevronDownIcon, ChevronRightIcon } from "@radix-ui/react-icons";
|
||||||
import { FileTreeItemIcon } from "./fileTreeItemIcon";
|
import { FileTreeItemIcon } from "./fileTreeItemIcon";
|
||||||
import Link from "next/link";
|
import Link from "next/link";
|
||||||
|
import { FileTreeItem } from "../types";
|
||||||
|
|
||||||
export const FileTreeItemComponent = ({
|
export const FileTreeItemComponent = ({
|
||||||
node,
|
node,
|
||||||
|
|
|
||||||
|
|
@ -1,9 +1,9 @@
|
||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { FileTreeItem } from "../actions";
|
|
||||||
import { useMemo } from "react";
|
import { useMemo } from "react";
|
||||||
import { VscodeFolderIcon } from "@/app/components/vscodeFolderIcon";
|
import { VscodeFolderIcon } from "@/app/components/vscodeFolderIcon";
|
||||||
import { VscodeFileIcon } from "@/app/components/vscodeFileIcon";
|
import { VscodeFileIcon } from "@/app/components/vscodeFileIcon";
|
||||||
|
import { FileTreeItem } from "../types";
|
||||||
|
|
||||||
interface FileTreeItemIconProps {
|
interface FileTreeItemIconProps {
|
||||||
item: FileTreeItem;
|
item: FileTreeItem;
|
||||||
|
|
|
||||||
|
|
@ -1,26 +1,25 @@
|
||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { getTree } from "../actions";
|
import { useBrowseParams } from "@/app/[domain]/browse/hooks/useBrowseParams";
|
||||||
import { useQuery } from "@tanstack/react-query";
|
|
||||||
import { unwrapServiceError } from "@/lib/utils";
|
|
||||||
import { ResizablePanel } from "@/components/ui/resizable";
|
|
||||||
import { Skeleton } from "@/components/ui/skeleton";
|
|
||||||
import { useBrowseState } from "@/app/[domain]/browse/hooks/useBrowseState";
|
import { useBrowseState } from "@/app/[domain]/browse/hooks/useBrowseState";
|
||||||
import { PureFileTreePanel } from "./pureFileTreePanel";
|
import { getTree } from "@/app/api/(client)/client";
|
||||||
|
import { KeyboardShortcutHint } from "@/app/components/keyboardShortcutHint";
|
||||||
import { Button } from "@/components/ui/button";
|
import { Button } from "@/components/ui/button";
|
||||||
import { ImperativePanelHandle } from "react-resizable-panels";
|
import { ResizablePanel } from "@/components/ui/resizable";
|
||||||
|
import { Separator } from "@/components/ui/separator";
|
||||||
|
import { Skeleton } from "@/components/ui/skeleton";
|
||||||
|
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
|
||||||
|
import { unwrapServiceError } from "@/lib/utils";
|
||||||
|
import { useQuery } from "@tanstack/react-query";
|
||||||
|
import { SearchIcon } from "lucide-react";
|
||||||
import { useRef } from "react";
|
import { useRef } from "react";
|
||||||
import { useHotkeys } from "react-hotkeys-hook";
|
import { useHotkeys } from "react-hotkeys-hook";
|
||||||
import { Separator } from "@/components/ui/separator";
|
|
||||||
import {
|
import {
|
||||||
GoSidebarCollapse as ExpandIcon,
|
GoSidebarExpand as CollapseIcon,
|
||||||
GoSidebarExpand as CollapseIcon
|
GoSidebarCollapse as ExpandIcon
|
||||||
} from "react-icons/go";
|
} from "react-icons/go";
|
||||||
import { Tooltip, TooltipContent } from "@/components/ui/tooltip";
|
import { ImperativePanelHandle } from "react-resizable-panels";
|
||||||
import { TooltipTrigger } from "@/components/ui/tooltip";
|
import { PureFileTreePanel } from "./pureFileTreePanel";
|
||||||
import { KeyboardShortcutHint } from "@/app/components/keyboardShortcutHint";
|
|
||||||
import { useBrowseParams } from "@/app/[domain]/browse/hooks/useBrowseParams";
|
|
||||||
import { SearchIcon } from "lucide-react";
|
|
||||||
|
|
||||||
|
|
||||||
interface FileTreePanelProps {
|
interface FileTreePanelProps {
|
||||||
|
|
|
||||||
|
|
@ -1,6 +1,6 @@
|
||||||
'use client';
|
'use client';
|
||||||
|
|
||||||
import { FileTreeNode as RawFileTreeNode } from "../actions";
|
import { FileTreeNode as RawFileTreeNode } from "../types";
|
||||||
import { ScrollArea, ScrollBar } from "@/components/ui/scroll-area";
|
import { ScrollArea, ScrollBar } from "@/components/ui/scroll-area";
|
||||||
import React, { useCallback, useMemo, useState, useEffect, useRef } from "react";
|
import React, { useCallback, useMemo, useState, useEffect, useRef } from "react";
|
||||||
import { FileTreeItemComponent } from "./fileTreeItemComponent";
|
import { FileTreeItemComponent } from "./fileTreeItemComponent";
|
||||||
|
|
|
||||||
44
packages/web/src/features/fileTree/types.ts
Normal file
44
packages/web/src/features/fileTree/types.ts
Normal file
|
|
@ -0,0 +1,44 @@
|
||||||
|
import { z } from "zod";
|
||||||
|
|
||||||
|
export const getTreeRequestSchema = z.object({
|
||||||
|
repoName: z.string(),
|
||||||
|
revisionName: z.string(),
|
||||||
|
});
|
||||||
|
export type GetTreeRequest = z.infer<typeof getTreeRequestSchema>;
|
||||||
|
|
||||||
|
export const getFilesRequestSchema = z.object({
|
||||||
|
repoName: z.string(),
|
||||||
|
revisionName: z.string(),
|
||||||
|
});
|
||||||
|
export type GetFilesRequest = z.infer<typeof getFilesRequestSchema>;
|
||||||
|
|
||||||
|
export const fileTreeItemSchema = z.object({
|
||||||
|
type: z.string(),
|
||||||
|
path: z.string(),
|
||||||
|
name: z.string(),
|
||||||
|
});
|
||||||
|
export type FileTreeItem = z.infer<typeof fileTreeItemSchema>;
|
||||||
|
|
||||||
|
type FileTreeNodeType = {
|
||||||
|
type: string;
|
||||||
|
path: string;
|
||||||
|
name: string;
|
||||||
|
children: FileTreeNodeType[];
|
||||||
|
};
|
||||||
|
|
||||||
|
export const fileTreeNodeSchema: z.ZodType<FileTreeNodeType> = z.lazy(() => z.object({
|
||||||
|
type: z.string(),
|
||||||
|
path: z.string(),
|
||||||
|
name: z.string(),
|
||||||
|
children: z.array(fileTreeNodeSchema),
|
||||||
|
}));
|
||||||
|
export type FileTreeNode = z.infer<typeof fileTreeNodeSchema>;
|
||||||
|
|
||||||
|
export const getTreeResponseSchema = z.object({
|
||||||
|
tree: fileTreeNodeSchema,
|
||||||
|
});
|
||||||
|
export type GetTreeResponse = z.infer<typeof getTreeResponseSchema>;
|
||||||
|
|
||||||
|
export const getFilesResponseSchema = z.array(fileTreeItemSchema);
|
||||||
|
export type GetFilesResponse = z.infer<typeof getFilesResponseSchema>;
|
||||||
|
|
||||||
|
|
@ -1,5 +1,4 @@
|
||||||
'use server';
|
import 'server-only';
|
||||||
|
|
||||||
import escapeStringRegexp from "escape-string-regexp";
|
import escapeStringRegexp from "escape-string-regexp";
|
||||||
import { fileNotFound, ServiceError, unexpectedError } from "../../lib/serviceError";
|
import { fileNotFound, ServiceError, unexpectedError } from "../../lib/serviceError";
|
||||||
import { FileSourceRequest, FileSourceResponse } from "./types";
|
import { FileSourceRequest, FileSourceResponse } from "./types";
|
||||||
|
|
|
||||||
|
|
@ -141,6 +141,7 @@ export const searchResponseSchema = z.object({
|
||||||
repositoryInfo: z.array(repositoryInfoSchema),
|
repositoryInfo: z.array(repositoryInfoSchema),
|
||||||
isBranchFilteringEnabled: z.boolean(),
|
isBranchFilteringEnabled: z.boolean(),
|
||||||
isSearchExhaustive: z.boolean(),
|
isSearchExhaustive: z.boolean(),
|
||||||
|
__debug_timings: z.record(z.string(), z.number()).optional(),
|
||||||
});
|
});
|
||||||
|
|
||||||
export const fileSourceRequestSchema = z.object({
|
export const fileSourceRequestSchema = z.object({
|
||||||
|
|
|
||||||
|
|
@ -1,16 +1,17 @@
|
||||||
'use server';
|
import 'server-only';
|
||||||
|
|
||||||
import { invalidZoektResponse, ServiceError } from "../../lib/serviceError";
|
|
||||||
import { isServiceError } from "../../lib/utils";
|
|
||||||
import { zoektFetch } from "./zoektClient";
|
|
||||||
import { ErrorCode } from "../../lib/errorCodes";
|
|
||||||
import { StatusCodes } from "http-status-codes";
|
|
||||||
import { zoektSearchResponseSchema } from "./zoektSchema";
|
|
||||||
import { SearchRequest, SearchResponse, SourceRange } from "./types";
|
|
||||||
import { PrismaClient, Repo } from "@sourcebot/db";
|
|
||||||
import { sew } from "@/actions";
|
import { sew } from "@/actions";
|
||||||
import { base64Decode } from "@sourcebot/shared";
|
|
||||||
import { withOptionalAuthV2 } from "@/withAuthV2";
|
import { withOptionalAuthV2 } from "@/withAuthV2";
|
||||||
|
import { PrismaClient, Repo } from "@sourcebot/db";
|
||||||
|
import { base64Decode, createLogger } from "@sourcebot/shared";
|
||||||
|
import { StatusCodes } from "http-status-codes";
|
||||||
|
import { ErrorCode } from "../../lib/errorCodes";
|
||||||
|
import { invalidZoektResponse, ServiceError } from "../../lib/serviceError";
|
||||||
|
import { isServiceError, measure } from "../../lib/utils";
|
||||||
|
import { SearchRequest, SearchResponse, SourceRange } from "./types";
|
||||||
|
import { zoektFetch } from "./zoektClient";
|
||||||
|
import { ZoektSearchResponse } from "./zoektSchema";
|
||||||
|
|
||||||
|
const logger = createLogger("searchApi");
|
||||||
|
|
||||||
// List of supported query prefixes in zoekt.
|
// List of supported query prefixes in zoekt.
|
||||||
// @see : https://github.com/sourcebot-dev/zoekt/blob/main/query/parse.go#L417
|
// @see : https://github.com/sourcebot-dev/zoekt/blob/main/query/parse.go#L417
|
||||||
|
|
@ -126,7 +127,7 @@ const getFileWebUrl = (template: string, branch: string, fileName: string): stri
|
||||||
return encodeURI(url + optionalQueryParams);
|
return encodeURI(url + optionalQueryParams);
|
||||||
}
|
}
|
||||||
|
|
||||||
export const search = async ({ query, matches, contextLines, whole }: SearchRequest) => sew(() =>
|
export const search = async ({ query, matches, contextLines, whole }: SearchRequest): Promise<SearchResponse | ServiceError> => sew(() =>
|
||||||
withOptionalAuthV2(async ({ org, prisma }) => {
|
withOptionalAuthV2(async ({ org, prisma }) => {
|
||||||
const transformedQuery = await transformZoektQuery(query, org.id, prisma);
|
const transformedQuery = await transformZoektQuery(query, org.id, prisma);
|
||||||
if (isServiceError(transformedQuery)) {
|
if (isServiceError(transformedQuery)) {
|
||||||
|
|
@ -200,20 +201,22 @@ export const search = async ({ query, matches, contextLines, whole }: SearchRequ
|
||||||
"X-Tenant-ID": org.id.toString()
|
"X-Tenant-ID": org.id.toString()
|
||||||
};
|
};
|
||||||
|
|
||||||
const searchResponse = await zoektFetch({
|
const { data: searchResponse, durationMs: fetchDurationMs } = await measure(
|
||||||
path: "/api/search",
|
() => zoektFetch({
|
||||||
body,
|
path: "/api/search",
|
||||||
header,
|
body,
|
||||||
method: "POST",
|
header,
|
||||||
});
|
method: "POST",
|
||||||
|
}),
|
||||||
|
"zoekt_fetch",
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
if (!searchResponse.ok) {
|
if (!searchResponse.ok) {
|
||||||
return invalidZoektResponse(searchResponse);
|
return invalidZoektResponse(searchResponse);
|
||||||
}
|
}
|
||||||
|
|
||||||
const searchBody = await searchResponse.json();
|
const transformZoektSearchResponse = async ({ Result }: ZoektSearchResponse) => {
|
||||||
|
|
||||||
const parser = zoektSearchResponseSchema.transform(async ({ Result }) => {
|
|
||||||
// @note (2025-05-12): in zoekt, repositories are identified by the `RepositoryID` field
|
// @note (2025-05-12): in zoekt, repositories are identified by the `RepositoryID` field
|
||||||
// which corresponds to the `id` in the Repo table. In order to efficiently fetch repository
|
// which corresponds to the `id` in the Repo table. In order to efficiently fetch repository
|
||||||
// metadata when transforming (potentially thousands) of file matches, we aggregate a unique
|
// metadata when transforming (potentially thousands) of file matches, we aggregate a unique
|
||||||
|
|
@ -379,7 +382,48 @@ export const search = async ({ query, matches, contextLines, whole }: SearchRequ
|
||||||
flushReason: Result.FlushReason,
|
flushReason: Result.FlushReason,
|
||||||
}
|
}
|
||||||
} satisfies SearchResponse;
|
} satisfies SearchResponse;
|
||||||
});
|
}
|
||||||
|
|
||||||
return parser.parseAsync(searchBody);
|
const { data: rawZoektResponse, durationMs: parseJsonDurationMs } = await measure(
|
||||||
|
() => searchResponse.json(),
|
||||||
|
"parse_json",
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
|
// @note: We do not use zod parseAsync here since in cases where the
|
||||||
|
// response is large (> 40MB), there can be significant performance issues.
|
||||||
|
const zoektResponse = rawZoektResponse as ZoektSearchResponse;
|
||||||
|
|
||||||
|
const { data: response, durationMs: transformZoektResponseDurationMs } = await measure(
|
||||||
|
() => transformZoektSearchResponse(zoektResponse),
|
||||||
|
"transform_zoekt_response",
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
|
const totalDurationMs = fetchDurationMs + parseJsonDurationMs + transformZoektResponseDurationMs;
|
||||||
|
|
||||||
|
// Debug log: timing breakdown
|
||||||
|
const timings = [
|
||||||
|
{ name: "zoekt_fetch", duration: fetchDurationMs },
|
||||||
|
{ name: "parse_json", duration: parseJsonDurationMs },
|
||||||
|
{ name: "transform_zoekt_response", duration: transformZoektResponseDurationMs },
|
||||||
|
];
|
||||||
|
|
||||||
|
logger.debug(`Search timing breakdown (query: "${query}"):`);
|
||||||
|
timings.forEach(({ name, duration }) => {
|
||||||
|
const percentage = ((duration / totalDurationMs) * 100).toFixed(1);
|
||||||
|
const durationStr = duration.toFixed(2).padStart(8);
|
||||||
|
const percentageStr = percentage.padStart(5);
|
||||||
|
logger.debug(` ${name.padEnd(25)} ${durationStr}ms (${percentageStr}%)`);
|
||||||
|
});
|
||||||
|
logger.debug(` ${"TOTAL".padEnd(25)} ${totalDurationMs.toFixed(2).padStart(8)}ms (100.0%)`);
|
||||||
|
|
||||||
|
return {
|
||||||
|
...response,
|
||||||
|
__debug_timings: {
|
||||||
|
zoekt_fetch: fetchDurationMs,
|
||||||
|
parse_json: parseJsonDurationMs,
|
||||||
|
transform_zoekt_response: transformZoektResponseDurationMs,
|
||||||
|
}
|
||||||
|
} satisfies SearchResponse;
|
||||||
}));
|
}));
|
||||||
|
|
|
||||||
|
|
@ -75,6 +75,8 @@ export const zoektSearchResponseSchema = z.object({
|
||||||
}),
|
}),
|
||||||
});
|
});
|
||||||
|
|
||||||
|
export type ZoektSearchResponse = z.infer<typeof zoektSearchResponseSchema>;
|
||||||
|
|
||||||
// @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L728
|
// @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L728
|
||||||
const zoektRepoStatsSchema = z.object({
|
const zoektRepoStatsSchema = z.object({
|
||||||
Repos: z.number(),
|
Repos: z.number(),
|
||||||
|
|
|
||||||
1
packages/web/src/features/workerApi/README.md
Normal file
1
packages/web/src/features/workerApi/README.md
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
This folder contains utilities to interact with the internal worker REST api. See packages/backend/api.ts
|
||||||
59
packages/web/src/features/workerApi/actions.ts
Normal file
59
packages/web/src/features/workerApi/actions.ts
Normal file
|
|
@ -0,0 +1,59 @@
|
||||||
|
'use server';
|
||||||
|
|
||||||
|
import { sew } from "@/actions";
|
||||||
|
import { unexpectedError } from "@/lib/serviceError";
|
||||||
|
import { withAuthV2, withMinimumOrgRole } from "@/withAuthV2";
|
||||||
|
import { OrgRole } from "@sourcebot/db";
|
||||||
|
import z from "zod";
|
||||||
|
|
||||||
|
const WORKER_API_URL = 'http://localhost:3060';
|
||||||
|
|
||||||
|
export const syncConnection = async (connectionId: number) => sew(() =>
|
||||||
|
withAuthV2(({ role }) =>
|
||||||
|
withMinimumOrgRole(role, OrgRole.OWNER, async () => {
|
||||||
|
const response = await fetch(`${WORKER_API_URL}/api/sync-connection`, {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({
|
||||||
|
connectionId
|
||||||
|
}),
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return unexpectedError('Failed to sync connection');
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
const schema = z.object({
|
||||||
|
jobId: z.string(),
|
||||||
|
});
|
||||||
|
return schema.parse(data);
|
||||||
|
})
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
||||||
|
export const indexRepo = async (repoId: number) => sew(() =>
|
||||||
|
withAuthV2(({ role }) =>
|
||||||
|
withMinimumOrgRole(role, OrgRole.OWNER, async () => {
|
||||||
|
const response = await fetch(`${WORKER_API_URL}/api/index-repo`, {
|
||||||
|
method: 'POST',
|
||||||
|
body: JSON.stringify({ repoId }),
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!response.ok) {
|
||||||
|
return unexpectedError('Failed to index repo');
|
||||||
|
}
|
||||||
|
|
||||||
|
const data = await response.json();
|
||||||
|
const schema = z.object({
|
||||||
|
jobId: z.string(),
|
||||||
|
});
|
||||||
|
return schema.parse(data);
|
||||||
|
})
|
||||||
|
)
|
||||||
|
);
|
||||||
|
|
@ -62,6 +62,18 @@ const initSingleTenancy = async () => {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// If we don't have the search context entitlement then wipe any existing
|
||||||
|
// search contexts that may be present in the DB. This could happen if a deployment had
|
||||||
|
// the entitlement, synced search contexts, and then no longer had the entitlement
|
||||||
|
const hasSearchContextEntitlement = hasEntitlement("search-contexts")
|
||||||
|
if(!hasSearchContextEntitlement) {
|
||||||
|
await prisma.searchContext.deleteMany({
|
||||||
|
where: {
|
||||||
|
orgId: SINGLE_TENANT_ORG_ID,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
// Sync anonymous access config from the config file
|
// Sync anonymous access config from the config file
|
||||||
const config = await loadConfig(env.CONFIG_PATH);
|
const config = await loadConfig(env.CONFIG_PATH);
|
||||||
const forceEnableAnonymousAccess = config.settings?.enablePublicAccess ?? env.FORCE_ENABLE_ANONYMOUS_ACCESS === 'true';
|
const forceEnableAnonymousAccess = config.settings?.enablePublicAccess ?? env.FORCE_ENABLE_ANONYMOUS_ACCESS === 'true';
|
||||||
|
|
|
||||||
|
|
@ -181,7 +181,7 @@ export const withMinimumOrgRole = async <T>(
|
||||||
userRole: OrgRole,
|
userRole: OrgRole,
|
||||||
minRequiredRole: OrgRole = OrgRole.MEMBER,
|
minRequiredRole: OrgRole = OrgRole.MEMBER,
|
||||||
fn: () => Promise<T>,
|
fn: () => Promise<T>,
|
||||||
) => {
|
): Promise<T | ServiceError> => {
|
||||||
|
|
||||||
const getAuthorizationPrecedence = (role: OrgRole): number => {
|
const getAuthorizationPrecedence = (role: OrgRole): number => {
|
||||||
switch (role) {
|
switch (role) {
|
||||||
|
|
|
||||||
11
yarn.lock
11
yarn.lock
|
|
@ -7908,6 +7908,8 @@ __metadata:
|
||||||
cross-fetch: "npm:^4.0.0"
|
cross-fetch: "npm:^4.0.0"
|
||||||
dotenv: "npm:^16.4.5"
|
dotenv: "npm:^16.4.5"
|
||||||
express: "npm:^4.21.2"
|
express: "npm:^4.21.2"
|
||||||
|
express-async-errors: "npm:^3.1.1"
|
||||||
|
fast-deep-equal: "npm:^3.1.3"
|
||||||
git-url-parse: "npm:^16.1.0"
|
git-url-parse: "npm:^16.1.0"
|
||||||
gitea-js: "npm:^1.22.0"
|
gitea-js: "npm:^1.22.0"
|
||||||
glob: "npm:^11.0.0"
|
glob: "npm:^11.0.0"
|
||||||
|
|
@ -12538,6 +12540,15 @@ __metadata:
|
||||||
languageName: node
|
languageName: node
|
||||||
linkType: hard
|
linkType: hard
|
||||||
|
|
||||||
|
"express-async-errors@npm:^3.1.1":
|
||||||
|
version: 3.1.1
|
||||||
|
resolution: "express-async-errors@npm:3.1.1"
|
||||||
|
peerDependencies:
|
||||||
|
express: ^4.16.2
|
||||||
|
checksum: 10c0/56c4e90c44e98c7edc5bd38e18dd23b0d9a7139cb94ff3e25943ba257415b433e0e52ea8f9bc1fb5b70a5e6c5246eaace4fb69ab171edfb8896580928bb97ec6
|
||||||
|
languageName: node
|
||||||
|
linkType: hard
|
||||||
|
|
||||||
"express-rate-limit@npm:^7.5.0":
|
"express-rate-limit@npm:^7.5.0":
|
||||||
version: 7.5.0
|
version: 7.5.0
|
||||||
resolution: "express-rate-limit@npm:7.5.0"
|
resolution: "express-rate-limit@npm:7.5.0"
|
||||||
|
|
|
||||||