diff --git a/package.json b/package.json index c5909e76..7c726c7e 100644 --- a/package.json +++ b/package.json @@ -6,7 +6,7 @@ "scripts": { "build": "cross-env SKIP_ENV_VALIDATION=1 yarn workspaces foreach -A run build", "test": "yarn workspaces foreach -A run test", - "dev": "yarn dev:prisma:migrate:dev && npm-run-all --print-label --parallel dev:zoekt dev:backend dev:web watch:mcp watch:schemas", + "dev": "concurrently --kill-others --names \"zoekt,worker,web,mcp,schemas\" 'yarn dev:zoekt' 'yarn dev:backend' 'yarn dev:web' 'yarn watch:mcp' 'yarn watch:schemas'", "with-env": "cross-env PATH=\"$PWD/bin:$PATH\" dotenv -e .env.development -c --", "dev:zoekt": "yarn with-env zoekt-webserver -index .sourcebot/index -rpc", "dev:backend": "yarn with-env yarn workspace @sourcebot/backend dev:watch", @@ -21,9 +21,9 @@ "build:deps": "yarn workspaces foreach -R --from '{@sourcebot/schemas,@sourcebot/error,@sourcebot/crypto,@sourcebot/db,@sourcebot/shared}' run build" }, "devDependencies": { + "concurrently": "^9.2.1", "cross-env": "^7.0.3", - "dotenv-cli": "^8.0.0", - "npm-run-all": "^4.1.5" + "dotenv-cli": "^8.0.0" }, "packageManager": "yarn@4.7.0", "resolutions": { diff --git a/packages/backend/src/connectionManager.ts b/packages/backend/src/connectionManager.ts index e17fce06..ce023fc5 100644 --- a/packages/backend/src/connectionManager.ts +++ b/packages/backend/src/connectionManager.ts @@ -364,12 +364,12 @@ export class ConnectionManager { } } - public dispose() { + public async dispose() { if (this.interval) { clearInterval(this.interval); } - this.worker.close(); - this.queue.close(); + await this.worker.close(); + await this.queue.close(); } } diff --git a/packages/backend/src/ee/repoPermissionSyncer.ts b/packages/backend/src/ee/repoPermissionSyncer.ts index f411c3e3..453b94f6 100644 --- a/packages/backend/src/ee/repoPermissionSyncer.ts +++ b/packages/backend/src/ee/repoPermissionSyncer.ts @@ -101,12 +101,12 @@ export class RepoPermissionSyncer { }, 1000 * 5); } - public dispose() { + public async dispose() { if (this.interval) { clearInterval(this.interval); } - this.worker.close(); - this.queue.close(); + await this.worker.close(); + await this.queue.close(); } private async schedulePermissionSync(repos: Repo[]) { diff --git a/packages/backend/src/ee/userPermissionSyncer.ts b/packages/backend/src/ee/userPermissionSyncer.ts index 90ae8629..6ef77bcf 100644 --- a/packages/backend/src/ee/userPermissionSyncer.ts +++ b/packages/backend/src/ee/userPermissionSyncer.ts @@ -101,12 +101,12 @@ export class UserPermissionSyncer { }, 1000 * 5); } - public dispose() { + public async dispose() { if (this.interval) { clearInterval(this.interval); } - this.worker.close(); - this.queue.close(); + await this.worker.close(); + await this.queue.close(); } private async schedulePermissionSync(users: User[]) { diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts index 411de187..0b9006b9 100644 --- a/packages/backend/src/index.ts +++ b/packages/backend/src/index.ts @@ -90,13 +90,28 @@ else if (env.EXPERIMENT_EE_PERMISSION_SYNC_ENABLED === 'true' && hasEntitlement( } const cleanup = async (signal: string) => { - logger.info(`Recieved ${signal}, cleaning up...`); + logger.info(`Received ${signal}, cleaning up...`); - connectionManager.dispose(); - repoManager.dispose(); - repoPermissionSyncer.dispose(); - userPermissionSyncer.dispose(); - indexSyncer.dispose(); + const shutdownTimeout = 30000; // 30 seconds + + try { + await Promise.race([ + Promise.all([ + indexSyncer.dispose(), + 
repoManager.dispose(), + connectionManager.dispose(), + repoPermissionSyncer.dispose(), + userPermissionSyncer.dispose(), + promClient.dispose(), + ]), + new Promise((_, reject) => + setTimeout(() => reject(new Error('Shutdown timeout')), shutdownTimeout) + ) + ]); + logger.info('All workers shut down gracefully'); + } catch (error) { + logger.warn('Shutdown timeout or error, forcing exit:', error instanceof Error ? error.message : String(error)); + } await prisma.$disconnect(); await redis.quit(); diff --git a/packages/backend/src/indexSyncer.ts b/packages/backend/src/indexSyncer.ts index aba95fbf..19b20a77 100644 --- a/packages/backend/src/indexSyncer.ts +++ b/packages/backend/src/indexSyncer.ts @@ -1,29 +1,46 @@ import { createBullBoard } from '@bull-board/api'; import { ExpressAdapter } from '@bull-board/express'; import * as Sentry from '@sentry/node'; -import { PrismaClient, Repo, RepoIndexingJobStatus } from "@sourcebot/db"; -import { createLogger } from "@sourcebot/logger"; +import { PrismaClient, Repo, RepoJobStatus, RepoJobType } from "@sourcebot/db"; +import { createLogger, Logger } from "@sourcebot/logger"; import express from 'express'; import { BullBoardGroupMQAdapter, Job, Queue, ReservedJob, Worker } from "groupmq"; import { Redis } from 'ioredis'; import { AppContext, repoMetadataSchema, RepoWithConnections, Settings } from "./types.js"; -import { getAuthCredentialsForRepo, getRepoPath, measure } from './utils.js'; +import { getAuthCredentialsForRepo, getRepoPath, getShardPrefix, measure } from './utils.js'; import { existsSync } from 'fs'; -import { cloneRepository, fetchRepository, unsetGitConfig, upsertGitConfig } from './git.js'; +import { cloneRepository, fetchRepository, isPathAValidGitRepoRoot, unsetGitConfig, upsertGitConfig } from './git.js'; import { indexGitRepository } from './zoekt.js'; +import { rm, readdir } from 'fs/promises'; -const logger = createLogger('index-syncer'); +const LOG_TAG = 'index-syncer'; +const logger = createLogger(LOG_TAG); +const createJobLogger = (jobId: string) => createLogger(`${LOG_TAG}:job:${jobId}`); -type IndexSyncJob = { +type JobPayload = { + type: 'INDEX' | 'CLEANUP'; jobId: string; -} + repoId: number; + repoName: string; +}; const JOB_TIMEOUT_MS = 1000 * 60 * 60 * 6; // 6 hour indexing timeout + +const groupmqLifecycleExceptionWrapper = async (name: string, fn: () => Promise) => { + try { + await fn(); + } catch (error) { + Sentry.captureException(error); + logger.error(`Exception thrown while executing lifecycle function \`${name}\`.`, error); + } +} + + export class IndexSyncer { private interval?: NodeJS.Timeout; - private queue: Queue; - private worker: Worker; + private queue: Queue; + private worker: Worker; constructor( private db: PrismaClient, @@ -31,28 +48,26 @@ export class IndexSyncer { redis: Redis, private ctx: AppContext, ) { - this.queue = new Queue({ + this.queue = new Queue({ redis, namespace: 'index-sync-queue', jobTimeoutMs: JOB_TIMEOUT_MS, - // logger: true, + logger, + maxAttempts: 1, }); - this.worker = new Worker({ + this.worker = new Worker({ queue: this.queue, maxStalledCount: 1, - stalledInterval: 1000, handler: this.runJob.bind(this), concurrency: this.settings.maxRepoIndexingJobConcurrency, + logger, }); this.worker.on('completed', this.onJobCompleted.bind(this)); this.worker.on('failed', this.onJobFailed.bind(this)); this.worker.on('stalled', this.onJobStalled.bind(this)); - this.worker.on('error', async (error) => { - Sentry.captureException(error); - logger.error(`Index syncer worker error.`, 
error); - }); + this.worker.on('error', this.onWorkerError.bind(this)); // @nocheckin const app = express(); @@ -69,75 +84,133 @@ export class IndexSyncer { public async startScheduler() { this.interval = setInterval(async () => { - const thresholdDate = new Date(Date.now() - this.settings.reindexIntervalMs); - - const repos = await this.db.repo.findMany({ - where: { - AND: [ - { - OR: [ - { indexedAt: null }, - { indexedAt: { lt: thresholdDate } }, - ] - }, - { - NOT: { - indexingJobs: { - some: { - OR: [ - // Don't schedule if there are active jobs that were created within the threshold date. - // This handles the case where a job is stuck in a pending state and will never be scheduled. - { - AND: [ - { - status: { - in: [ - RepoIndexingJobStatus.PENDING, - RepoIndexingJobStatus.IN_PROGRESS, - ] - }, - }, - { - createdAt: { - gt: thresholdDate, - } - } - ] - }, - // Don't schedule if there are recent failed jobs (within the threshold date). - { - AND: [ - { status: RepoIndexingJobStatus.FAILED }, - { completedAt: { gt: thresholdDate } }, - ] - } - ] - } - } - } - } - ], - } - }); - - if (repos.length === 0) { - return; - } - - await this.scheduleIndexSync(repos); + await this.scheduleIndexJobs(); + await this.scheduleCleanupJobs(); }, 1000 * 5); this.worker.run(); } - private async scheduleIndexSync(repos: Repo[]) { + private async scheduleIndexJobs() { + const thresholdDate = new Date(Date.now() - this.settings.reindexIntervalMs); + const reposToIndex = await this.db.repo.findMany({ + where: { + AND: [ + { + OR: [ + { indexedAt: null }, + { indexedAt: { lt: thresholdDate } }, + ] + }, + { + NOT: { + jobs: { + some: { + AND: [ + { + type: RepoJobType.INDEX, + }, + { + OR: [ + // Don't schedule if there are active jobs that were created within the threshold date. + // This handles the case where a job is stuck in a pending state and will never be scheduled. + { + AND: [ + { + status: { + in: [ + RepoJobStatus.PENDING, + RepoJobStatus.IN_PROGRESS, + ] + }, + }, + { + createdAt: { + gt: thresholdDate, + } + } + ] + }, + // Don't schedule if there are recent failed jobs (within the threshold date). + { + AND: [ + { status: RepoJobStatus.FAILED }, + { completedAt: { gt: thresholdDate } }, + ] + } + ] + } + ] + } + } + } + } + ], + } + }); + + if (reposToIndex.length > 0) { + await this.createJobs(reposToIndex, RepoJobType.INDEX); + } + } + + private async scheduleCleanupJobs() { + const thresholdDate = new Date(Date.now() - this.settings.repoGarbageCollectionGracePeriodMs); + + const reposToCleanup = await this.db.repo.findMany({ + where: { + connections: { + none: {} + }, + OR: [ + { indexedAt: null }, + { indexedAt: { lt: thresholdDate } }, + ], + // Don't schedule if there are active jobs that were created within the threshold date. + NOT: { + jobs: { + some: { + AND: [ + { + type: RepoJobType.CLEANUP, + }, + { + status: { + in: [ + RepoJobStatus.PENDING, + RepoJobStatus.IN_PROGRESS, + ] + }, + }, + { + createdAt: { + gt: thresholdDate, + } + } + ] + } + } + } + } + }); + + if (reposToCleanup.length > 0) { + await this.createJobs(reposToCleanup, RepoJobType.CLEANUP); + } + } + + private async createJobs(repos: Repo[], type: RepoJobType) { // @note: we don't perform this in a transaction because // we want to avoid the situation where a job is created and run // prior to the transaction being committed. 
- const jobs = await this.db.repoIndexingJob.createManyAndReturn({ + const jobs = await this.db.repoJob.createManyAndReturn({ data: repos.map(repo => ({ + type, repoId: repo.id, - })) + })), + include: { + repo: true, + } }); for (const job of jobs) { @@ -145,22 +218,29 @@ export class IndexSyncer { groupId: `repo:${job.repoId}`, data: { jobId: job.id, + type, + repoName: job.repo.name, + repoId: job.repo.id, }, jobId: job.id, }); } } - private async runJob(job: ReservedJob) { + private async runJob(job: ReservedJob) { const id = job.data.jobId; - const { repo } = await this.db.repoIndexingJob.update({ + const logger = createJobLogger(id); + logger.info(`Running job ${id} for repo ${job.data.repoName}`); + + const { repo, type: jobType } = await this.db.repoJob.update({ where: { id, }, data: { - status: RepoIndexingJobStatus.IN_PROGRESS, + status: RepoJobStatus.IN_PROGRESS, }, select: { + type: true, repo: { include: { connections: { @@ -173,10 +253,14 @@ export class IndexSyncer { } }); - await this.syncGitRepository(repo); + if (jobType === RepoJobType.INDEX) { + await this.indexRepository(repo, logger); + } else if (jobType === RepoJobType.CLEANUP) { + await this.cleanupRepository(repo, logger); + } } - private async syncGitRepository(repo: RepoWithConnections) { + private async indexRepository(repo: RepoWithConnections, logger: Logger) { const { path: repoPath, isReadOnly } = getRepoPath(repo, this.ctx); const metadata = repoMetadataSchema.parse(repo.metadata); @@ -185,6 +269,14 @@ export class IndexSyncer { const cloneUrlMaybeWithToken = credentials?.cloneUrlWithToken ?? repo.cloneUrl; const authHeader = credentials?.authHeader ?? undefined; + // If the repo path exists but it is not a valid git repository root, this indicates + // that the repository is in a bad state. To fix, we remove the directory and perform + // a fresh clone. + if (existsSync(repoPath) && !(await isPathAValidGitRepoRoot(repoPath)) && !isReadOnly) { + logger.warn(`${repoPath} is not a valid git repository root. 
Deleting directory and performing fresh clone.`); + await rm(repoPath, { recursive: true, force: true }); + } + if (existsSync(repoPath) && !isReadOnly) { // @NOTE: in #483, we changed the cloning method s.t., we _no longer_ // write the clone URL (which could contain a auth token) to the @@ -238,57 +330,94 @@ export class IndexSyncer { logger.info(`Indexed ${repo.displayName} in ${indexDuration_s}s`); } - private async onJobCompleted(job: Job) { - const { repo } = await this.db.repoIndexingJob.update({ - where: { id: job.data.jobId }, - data: { - status: RepoIndexingJobStatus.COMPLETED, - repo: { - update: { + private async cleanupRepository(repo: Repo, logger: Logger) { + const { path: repoPath, isReadOnly } = getRepoPath(repo, this.ctx); + if (existsSync(repoPath) && !isReadOnly) { + logger.info(`Deleting repo directory ${repoPath}`); + await rm(repoPath, { recursive: true, force: true }); + } + + const shardPrefix = getShardPrefix(repo.orgId, repo.id); + const files = (await readdir(this.ctx.indexPath)).filter(file => file.startsWith(shardPrefix)); + for (const file of files) { + const filePath = `${this.ctx.indexPath}/${file}`; + logger.info(`Deleting shard file ${filePath}`); + await rm(filePath, { force: true }); + } + } + + private onJobCompleted = async (job: Job) => + groupmqLifecycleExceptionWrapper('onJobCompleted', async () => { + const logger = createJobLogger(job.data.jobId); + const jobData = await this.db.repoJob.update({ + where: { id: job.data.jobId }, + data: { + status: RepoJobStatus.COMPLETED, + completedAt: new Date(), + } + }); + + if (jobData.type === RepoJobType.INDEX) { + const repo = await this.db.repo.update({ + where: { id: jobData.repoId }, + data: { indexedAt: new Date(), } + }); + + logger.info(`Completed index job ${job.data.jobId} for repo ${repo.name}`); + } + else if (jobData.type === RepoJobType.CLEANUP) { + const repo = await this.db.repo.delete({ + where: { id: jobData.repoId }, + }); + + logger.info(`Completed cleanup job ${job.data.jobId} for repo ${repo.name}`); + } + }); + + private onJobFailed = async (job: Job) => + groupmqLifecycleExceptionWrapper('onJobFailed', async () => { + const logger = createJobLogger(job.data.jobId); + + const { repo } = await this.db.repoJob.update({ + where: { id: job.data.jobId }, + data: { + completedAt: new Date(), + errorMessage: job.failedReason, }, - completedAt: new Date(), - }, - select: { repo: true } + select: { repo: true } + }); + + logger.error(`Failed job ${job.data.jobId} for repo ${repo.name}`); }); - logger.info(`Completed index job ${job.data.jobId} for repo ${repo.name}`); - } + private onJobStalled = async (jobId: string) => + groupmqLifecycleExceptionWrapper('onJobStalled', async () => { + const logger = createJobLogger(jobId); + const { repo } = await this.db.repoJob.update({ + where: { id: jobId }, + data: { + status: RepoJobStatus.FAILED, + completedAt: new Date(), + errorMessage: 'Job stalled', + }, + select: { repo: true } + }); - private async onJobFailed(job: Job) { - const { repo } = await this.db.repoIndexingJob.update({ - where: { id: job.data.jobId }, - data: { - status: RepoIndexingJobStatus.FAILED, - completedAt: new Date(), - errorMessage: job.failedReason, - }, - select: { repo: true} + logger.error(`Job ${jobId} stalled for repo ${repo.name}`); }); - logger.error(`Failed index job ${job.data.jobId} for repo ${repo.name}`); + private async onWorkerError(error: Error) { + Sentry.captureException(error); + logger.error(`Index syncer worker error.`, error); } - private async 
onJobStalled(jobId: string) { - const { repo } = await this.db.repoIndexingJob.update({ - where: { id: jobId }, - data: { - status: RepoIndexingJobStatus.FAILED, - completedAt: new Date(), - errorMessage: 'Job stalled', - }, - select: { repo: true } - }); - - logger.error(`Job ${jobId} stalled for repo ${repo.name}`); - } - - public dispose() { + public async dispose() { if (this.interval) { clearInterval(this.interval); } - this.worker.close(); - this.queue.close(); + await this.worker.close(); + await this.queue.close(); } } \ No newline at end of file diff --git a/packages/backend/src/promClient.ts b/packages/backend/src/promClient.ts index 058cfe0b..4b806f6e 100644 --- a/packages/backend/src/promClient.ts +++ b/packages/backend/src/promClient.ts @@ -1,4 +1,5 @@ import express, { Request, Response } from 'express'; +import { Server } from 'http'; import client, { Registry, Counter, Gauge } from 'prom-client'; import { createLogger } from "@sourcebot/logger"; @@ -7,6 +8,8 @@ const logger = createLogger('prometheus-client'); export class PromClient { private registry: Registry; private app: express.Application; + private server: Server; + public activeRepoIndexingJobs: Gauge; public pendingRepoIndexingJobs: Gauge; public repoIndexingReattemptsTotal: Counter; @@ -98,12 +101,12 @@ export class PromClient { res.end(metrics); }); - this.app.listen(this.PORT, () => { + this.server = this.app.listen(this.PORT, () => { logger.info(`Prometheus metrics server is running on port ${this.PORT}`); }); } - getRegistry(): Registry { - return this.registry; + dispose() { + this.server.close(); } } \ No newline at end of file diff --git a/packages/backend/src/repoManager.ts b/packages/backend/src/repoManager.ts index 89e41673..33f8bdad 100644 --- a/packages/backend/src/repoManager.ts +++ b/packages/backend/src/repoManager.ts @@ -558,9 +558,9 @@ export class RepoManager { if (this.interval) { clearInterval(this.interval); } - this.indexWorker.close(); - this.indexQueue.close(); - this.gcQueue.close(); - this.gcWorker.close(); + await this.indexWorker.close(); + await this.indexQueue.close(); + await this.gcQueue.close(); + await this.gcWorker.close(); } } \ No newline at end of file diff --git a/packages/db/prisma/schema.prisma b/packages/db/prisma/schema.prisma index 75e30ec9..16ed94f0 100644 --- a/packages/db/prisma/schema.prisma +++ b/packages/db/prisma/schema.prisma @@ -54,13 +54,15 @@ model Repo { webUrl String? connections RepoToConnection[] imageUrl String? + + /// @deprecated status tracking is now done via the `jobs` table. repoIndexingStatus RepoIndexingStatus @default(NEW) permittedUsers UserToRepoPermission[] permissionSyncJobs RepoPermissionSyncJob[] permissionSyncedAt DateTime? /// When the permissions were last synced successfully. - indexingJobs RepoIndexingJob[] + jobs RepoJob[] indexedAt DateTime? /// When the repo was last indexed successfully. external_id String /// The id of the repo in the external service @@ -76,16 +78,22 @@ model Repo { @@index([orgId]) } -enum RepoIndexingJobStatus { +enum RepoJobStatus { PENDING IN_PROGRESS COMPLETED FAILED } -model RepoIndexingJob { +enum RepoJobType { + INDEX + CLEANUP +} + +model RepoJob { id String @id @default(cuid()) - status RepoIndexingJobStatus @default(PENDING) + type RepoJobType + status RepoJobStatus @default(PENDING) createdAt DateTime @default(now()) updatedAt DateTime @updatedAt completedAt DateTime? 
diff --git a/packages/logger/src/index.ts b/packages/logger/src/index.ts index d3998d2c..635c8b3c 100644 --- a/packages/logger/src/index.ts +++ b/packages/logger/src/index.ts @@ -1,4 +1,4 @@ -import winston, { format } from 'winston'; +import winston, { format, Logger } from 'winston'; import { Logtail } from '@logtail/node'; import { LogtailTransport } from '@logtail/winston'; import { MESSAGE } from 'triple-beam'; @@ -48,7 +48,7 @@ const createLogger = (label: string) => { format: combine( errors({ stack: true }), timestamp(), - labelFn({ label: label }) + labelFn({ label: label }), ), transports: [ new winston.transports.Console({ @@ -84,4 +84,8 @@ const createLogger = (label: string) => { export { createLogger -}; \ No newline at end of file +}; + +export type { + Logger, +} \ No newline at end of file diff --git a/yarn.lock b/yarn.lock index 6fe84282..34be0b97 100644 --- a/yarn.lock +++ b/yarn.lock @@ -10061,6 +10061,17 @@ __metadata: languageName: node linkType: hard +"cliui@npm:^8.0.1": + version: 8.0.1 + resolution: "cliui@npm:8.0.1" + dependencies: + string-width: "npm:^4.2.0" + strip-ansi: "npm:^6.0.1" + wrap-ansi: "npm:^7.0.0" + checksum: 10c0/4bda0f09c340cbb6dfdc1ed508b3ca080f12992c18d68c6be4d9cf51756033d5266e61ec57529e610dacbf4da1c634423b0c1b11037709cc6b09045cbd815df5 + languageName: node + linkType: hard + "clone@npm:^1.0.2": version: 1.0.4 resolution: "clone@npm:1.0.4" @@ -10459,6 +10470,23 @@ __metadata: languageName: node linkType: hard +"concurrently@npm:^9.2.1": + version: 9.2.1 + resolution: "concurrently@npm:9.2.1" + dependencies: + chalk: "npm:4.1.2" + rxjs: "npm:7.8.2" + shell-quote: "npm:1.8.3" + supports-color: "npm:8.1.1" + tree-kill: "npm:1.2.2" + yargs: "npm:17.7.2" + bin: + conc: dist/bin/concurrently.js + concurrently: dist/bin/concurrently.js + checksum: 10c0/da37f239f82eb7ac24f5ddb56259861e5f1d6da2ade7602b6ea7ad3101b13b5ccec02a77b7001402d1028ff2fdc38eed55644b32853ad5abf30e057002a963aa + languageName: node + linkType: hard + "content-disposition@npm:0.5.4": version: 0.5.4 resolution: "content-disposition@npm:0.5.4" @@ -11810,7 +11838,7 @@ __metadata: languageName: node linkType: hard -"escalade@npm:^3.2.0": +"escalade@npm:^3.1.1, escalade@npm:^3.2.0": version: 3.2.0 resolution: "escalade@npm:3.2.0" checksum: 10c0/ced4dd3a78e15897ed3be74e635110bbf3b08877b0a41be50dcb325ee0e0b5f65fc2d50e9845194d7c4633f327e2e1c6cce00a71b617c5673df0374201d67f65 @@ -12781,6 +12809,13 @@ __metadata: languageName: node linkType: hard +"get-caller-file@npm:^2.0.5": + version: 2.0.5 + resolution: "get-caller-file@npm:2.0.5" + checksum: 10c0/c6c7b60271931fa752aeb92f2b47e355eac1af3a2673f47c9589e8f8a41adc74d45551c1bc57b5e66a80609f10ffb72b6f575e4370d61cc3f7f3aaff01757cde + languageName: node + linkType: hard + "get-intrinsic@npm:^1.2.4, get-intrinsic@npm:^1.2.5, get-intrinsic@npm:^1.2.6, get-intrinsic@npm:^1.2.7, get-intrinsic@npm:^1.3.0": version: 1.3.0 resolution: "get-intrinsic@npm:1.3.0" @@ -17617,6 +17652,13 @@ __metadata: languageName: node linkType: hard +"require-directory@npm:^2.1.1": + version: 2.1.1 + resolution: "require-directory@npm:2.1.1" + checksum: 10c0/83aa76a7bc1531f68d92c75a2ca2f54f1b01463cb566cf3fbc787d0de8be30c9dbc211d1d46be3497dac5785fe296f2dd11d531945ac29730643357978966e99 + languageName: node + linkType: hard + "require-from-string@npm:^2.0.2": version: 2.0.2 resolution: "require-from-string@npm:2.0.2" @@ -17931,9 +17973,9 @@ __metadata: version: 0.0.0-use.local resolution: "root-workspace-0b6124@workspace:." 
dependencies: + concurrently: "npm:^9.2.1" cross-env: "npm:^7.0.3" dotenv-cli: "npm:^8.0.0" - npm-run-all: "npm:^4.1.5" languageName: unknown linkType: soft @@ -18015,6 +18057,15 @@ __metadata: languageName: node linkType: hard +"rxjs@npm:7.8.2": + version: 7.8.2 + resolution: "rxjs@npm:7.8.2" + dependencies: + tslib: "npm:^2.1.0" + checksum: 10c0/1fcd33d2066ada98ba8f21fcbbcaee9f0b271de1d38dc7f4e256bfbc6ffcdde68c8bfb69093de7eeb46f24b1fb820620bf0223706cff26b4ab99a7ff7b2e2c45 + languageName: node + linkType: hard + "safe-array-concat@npm:^1.1.3": version: 1.1.3 resolution: "safe-array-concat@npm:1.1.3" @@ -18443,6 +18494,13 @@ __metadata: languageName: node linkType: hard +"shell-quote@npm:1.8.3": + version: 1.8.3 + resolution: "shell-quote@npm:1.8.3" + checksum: 10c0/bee87c34e1e986cfb4c30846b8e6327d18874f10b535699866f368ade11ea4ee45433d97bf5eada22c4320c27df79c3a6a7eb1bf3ecfc47f2c997d9e5e2672fd + languageName: node + linkType: hard + "shell-quote@npm:^1.6.1": version: 1.8.2 resolution: "shell-quote@npm:1.8.2" @@ -18864,7 +18922,7 @@ __metadata: languageName: node linkType: hard -"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0": +"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": version: 4.2.3 resolution: "string-width@npm:4.2.3" dependencies: @@ -19128,6 +19186,15 @@ __metadata: languageName: node linkType: hard +"supports-color@npm:8.1.1": + version: 8.1.1 + resolution: "supports-color@npm:8.1.1" + dependencies: + has-flag: "npm:^4.0.0" + checksum: 10c0/ea1d3c275dd604c974670f63943ed9bd83623edc102430c05adb8efc56ba492746b6e95386e7831b872ec3807fd89dd8eb43f735195f37b5ec343e4234cc7e89 + languageName: node + linkType: hard + "supports-color@npm:^5.3.0, supports-color@npm:^5.5.0": version: 5.5.0 resolution: "supports-color@npm:5.5.0" @@ -19438,6 +19505,15 @@ __metadata: languageName: node linkType: hard +"tree-kill@npm:1.2.2": + version: 1.2.2 + resolution: "tree-kill@npm:1.2.2" + bin: + tree-kill: cli.js + checksum: 10c0/7b1b7c7f17608a8f8d20a162e7957ac1ef6cd1636db1aba92f4e072dc31818c2ff0efac1e3d91064ede67ed5dc57c565420531a8134090a12ac10cf792ab14d2 + languageName: node + linkType: hard + "trim-lines@npm:^3.0.0": version: 3.0.1 resolution: "trim-lines@npm:3.0.1" @@ -20487,7 +20563,7 @@ __metadata: languageName: node linkType: hard -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0": version: 7.0.0 resolution: "wrap-ansi@npm:7.0.0" dependencies: @@ -20574,6 +20650,13 @@ __metadata: languageName: node linkType: hard +"y18n@npm:^5.0.5": + version: 5.0.8 + resolution: "y18n@npm:5.0.8" + checksum: 10c0/4df2842c36e468590c3691c894bc9cdbac41f520566e76e24f59401ba7d8b4811eb1e34524d57e54bc6d864bcb66baab7ffd9ca42bf1eda596618f9162b91249 + languageName: node + linkType: hard + "yallist@npm:^3.0.2": version: 3.1.1 resolution: "yallist@npm:3.1.1" @@ -20604,6 +20687,28 @@ __metadata: languageName: node linkType: hard +"yargs-parser@npm:^21.1.1": + version: 21.1.1 + resolution: "yargs-parser@npm:21.1.1" + checksum: 10c0/f84b5e48169479d2f402239c59f084cfd1c3acc197a05c59b98bab067452e6b3ea46d4dd8ba2985ba7b3d32a343d77df0debd6b343e5dae3da2aab2cdf5886b2 + languageName: node + linkType: hard + +"yargs@npm:17.7.2": + version: 17.7.2 + resolution: "yargs@npm:17.7.2" + dependencies: + cliui: "npm:^8.0.1" + escalade: "npm:^3.1.1" + get-caller-file: "npm:^2.0.5" + require-directory: "npm:^2.1.1" + string-width: "npm:^4.2.3" + y18n: "npm:^5.0.5" + yargs-parser: "npm:^21.1.1" + checksum: 
10c0/ccd7e723e61ad5965fffbb791366db689572b80cca80e0f96aad968dfff4156cd7cd1ad18607afe1046d8241e6fb2d6c08bf7fa7bfb5eaec818735d8feac8f05 + languageName: node + linkType: hard + "yocto-queue@npm:^0.1.0": version: 0.1.0 resolution: "yocto-queue@npm:0.1.0"
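Note on the shutdown change in packages/backend/src/index.ts: the signal handler now awaits every dispose() call and races the combined promise against a 30-second timer, so a hung groupmq worker or queue can no longer block process exit. A minimal standalone sketch of that pattern is below; the Disposable type and disposeAllWithTimeout helper are illustrative names for this note, not part of the PR.

type Disposable = { dispose(): Promise<void> };

// Race all disposers against a timeout; rejects with 'Shutdown timeout' if any of them hang.
const disposeAllWithTimeout = async (disposables: Disposable[], timeoutMs: number) => {
    await Promise.race([
        Promise.all(disposables.map((d) => d.dispose())),
        new Promise<never>((_, reject) =>
            setTimeout(() => reject(new Error('Shutdown timeout')), timeoutMs),
        ),
    ]);
};

// Usage mirrors the cleanup handler above: catch the rejection, log it, and proceed to
// close the database/Redis connections instead of waiting indefinitely, e.g.
// await disposeAllWithTimeout([indexSyncer, repoManager, connectionManager], 30_000);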