Mirror of https://github.com/sourcebot-dev/sourcebot.git (synced 2025-12-13 04:45:19 +00:00)
wip: move permissions check to Prisma extension
parent 0b03f94f67
commit 671fd78360

14 changed files with 697 additions and 737 deletions
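The Prisma extension itself is not part of the hunks below; they only show call sites being migrated onto a `withOptionalAuthV2(async ({ org, prisma }) => ...)` context. As a rough sketch of the direction the commit title implies (the helper name and wiring are assumptions, not code from this commit), such an extension could narrow `repo` queries to a user's permitted repos, mirroring the inline `permittedUsers` filters that the diff deletes from individual call sites:

```typescript
// Hypothetical sketch only: not the actual extension added by this commit.
import { PrismaClient } from "@sourcebot/db";

export const createPermissionScopedPrisma = (prisma: PrismaClient, userId?: string) =>
    prisma.$extends({
        query: {
            repo: {
                // Narrow read queries so only repos the user is permitted to see are returned,
                // matching the `permittedUsers: { some: { userId } }` filters removed below.
                async findMany({ args, query }) {
                    args.where = { ...args.where, permittedUsers: { some: { userId } } };
                    return query(args);
                },
                async findFirst({ args, query }) {
                    args.where = { ...args.where, permittedUsers: { some: { userId } } };
                    return query(args);
                },
            },
        },
    });
```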
@@ -74,7 +74,7 @@ await repoManager.validateIndexedReposHaveShards();

     const connectionManagerInterval = connectionManager.startScheduler();
     const repoManagerInterval = repoManager.startScheduler();
-    const permissionSyncerInterval = env.EXPERIMENT_PERMISSION_SYNC_ENABLED ? permissionSyncer.startScheduler() : null;
+    const permissionSyncerInterval = env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? permissionSyncer.startScheduler() : null;

     const cleanup = async (signal: string) => {
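For context on the `=== 'true'` comparisons that recur throughout this diff: environment variables are plain strings, so a bare truthiness check would also enable a feature when the flag is set to "false". A minimal illustration (hypothetical value):

```typescript
const flag = process.env.EXPERIMENT_PERMISSION_SYNC_ENABLED; // e.g. "false"

if (flag) {
    // runs for any non-empty string, including "false"
}
if (flag === 'true') {
    // runs only when the flag is explicitly set to "true"
}
```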
@@ -59,7 +59,7 @@ export class RepoPermissionSyncer {
             }

             // @todo: make this configurable
-        }, 1000 * 5);
+        }, 1000 * 60);
     }

     public dispose() {
@@ -1,46 +1,45 @@
|
|||
'use server';
|
||||
|
||||
import { getAuditService } from "@/ee/features/audit/factory";
|
||||
import { env } from "@/env.mjs";
|
||||
import { addUserToOrganization, orgHasAvailability } from "@/lib/authUtils";
|
||||
import { ErrorCode } from "@/lib/errorCodes";
|
||||
import { notAuthenticated, notFound, orgNotFound, secretAlreadyExists, ServiceError, ServiceErrorException, unexpectedError } from "@/lib/serviceError";
|
||||
import { CodeHostType, isHttpError, isServiceError } from "@/lib/utils";
|
||||
import { CodeHostType, getOrgMetadata, isHttpError, isServiceError } from "@/lib/utils";
|
||||
import { prisma } from "@/prisma";
|
||||
import { render } from "@react-email/components";
|
||||
import * as Sentry from '@sentry/nextjs';
|
||||
import { decrypt, encrypt, generateApiKey, hashSecret, getTokenFromConfig } from "@sourcebot/crypto";
|
||||
import { ConnectionSyncStatus, OrgRole, Prisma, RepoIndexingStatus, StripeSubscriptionStatus, Org, ApiKey } from "@sourcebot/db";
|
||||
import { decrypt, encrypt, generateApiKey, getTokenFromConfig, hashSecret } from "@sourcebot/crypto";
|
||||
import { ApiKey, ConnectionSyncStatus, Org, OrgRole, Prisma, RepoIndexingStatus, StripeSubscriptionStatus } from "@sourcebot/db";
|
||||
import { createLogger } from "@sourcebot/logger";
|
||||
import { azuredevopsSchema } from "@sourcebot/schemas/v3/azuredevops.schema";
|
||||
import { bitbucketSchema } from "@sourcebot/schemas/v3/bitbucket.schema";
|
||||
import { ConnectionConfig } from "@sourcebot/schemas/v3/connection.type";
|
||||
import { genericGitHostSchema } from "@sourcebot/schemas/v3/genericGitHost.schema";
|
||||
import { gerritSchema } from "@sourcebot/schemas/v3/gerrit.schema";
|
||||
import { giteaSchema } from "@sourcebot/schemas/v3/gitea.schema";
|
||||
import { githubSchema } from "@sourcebot/schemas/v3/github.schema";
|
||||
import { gitlabSchema } from "@sourcebot/schemas/v3/gitlab.schema";
|
||||
import { azuredevopsSchema } from "@sourcebot/schemas/v3/azuredevops.schema";
|
||||
import { GithubConnectionConfig } from "@sourcebot/schemas/v3/github.type";
|
||||
import { GitlabConnectionConfig } from "@sourcebot/schemas/v3/gitlab.type";
|
||||
import { GiteaConnectionConfig } from "@sourcebot/schemas/v3/gitea.type";
|
||||
import { githubSchema } from "@sourcebot/schemas/v3/github.schema";
|
||||
import { GithubConnectionConfig } from "@sourcebot/schemas/v3/github.type";
|
||||
import { gitlabSchema } from "@sourcebot/schemas/v3/gitlab.schema";
|
||||
import { GitlabConnectionConfig } from "@sourcebot/schemas/v3/gitlab.type";
|
||||
import { getPlan, hasEntitlement } from "@sourcebot/shared";
|
||||
import Ajv from "ajv";
|
||||
import { StatusCodes } from "http-status-codes";
|
||||
import { cookies, headers } from "next/headers";
|
||||
import { createTransport } from "nodemailer";
|
||||
import { auth } from "./auth";
|
||||
import { Octokit } from "octokit";
|
||||
import { auth } from "./auth";
|
||||
import { getConnection } from "./data/connection";
|
||||
import { getOrgFromDomain } from "./data/org";
|
||||
import { decrementOrgSeatCount, getSubscriptionForOrg } from "./ee/features/billing/serverUtils";
|
||||
import { IS_BILLING_ENABLED } from "./ee/features/billing/stripe";
|
||||
import InviteUserEmail from "./emails/inviteUserEmail";
|
||||
import JoinRequestApprovedEmail from "./emails/joinRequestApprovedEmail";
|
||||
import JoinRequestSubmittedEmail from "./emails/joinRequestSubmittedEmail";
|
||||
import { AGENTIC_SEARCH_TUTORIAL_DISMISSED_COOKIE_NAME, MOBILE_UNSUPPORTED_SPLASH_SCREEN_DISMISSED_COOKIE_NAME, SEARCH_MODE_COOKIE_NAME, SINGLE_TENANT_ORG_DOMAIN, SOURCEBOT_GUEST_USER_ID, SOURCEBOT_SUPPORT_EMAIL } from "./lib/constants";
|
||||
import { orgDomainSchema, orgNameSchema, repositoryQuerySchema } from "./lib/schemas";
|
||||
import { TenancyMode, ApiKeyPayload } from "./lib/types";
|
||||
import { decrementOrgSeatCount, getSubscriptionForOrg } from "./ee/features/billing/serverUtils";
|
||||
import { bitbucketSchema } from "@sourcebot/schemas/v3/bitbucket.schema";
|
||||
import { genericGitHostSchema } from "@sourcebot/schemas/v3/genericGitHost.schema";
|
||||
import { getPlan, hasEntitlement } from "@sourcebot/shared";
|
||||
import JoinRequestSubmittedEmail from "./emails/joinRequestSubmittedEmail";
|
||||
import JoinRequestApprovedEmail from "./emails/joinRequestApprovedEmail";
|
||||
import { createLogger } from "@sourcebot/logger";
|
||||
import { getAuditService } from "@/ee/features/audit/factory";
|
||||
import { addUserToOrganization, orgHasAvailability } from "@/lib/authUtils";
|
||||
import { getOrgMetadata } from "@/lib/utils";
|
||||
import { getOrgFromDomain } from "./data/org";
|
||||
import { ApiKeyPayload, TenancyMode } from "./lib/types";
|
||||
import { withOptionalAuthV2 } from "./withAuthV2";
|
||||
|
||||
const ajv = new Ajv({
|
||||
|
|
@@ -640,7 +639,7 @@ export const getConnectionInfo = async (connectionId: number, domain: string) =>
     })));

 export const getRepos = async (filter: { status?: RepoIndexingStatus[], connectionId?: number } = {}) => sew(() =>
-    withOptionalAuthV2(async ({ org, user }) => {
+    withOptionalAuthV2(async ({ org, prisma }) => {
         const repos = await prisma.repo.findMany({
             where: {
                 orgId: org.id,
@@ -654,13 +653,6 @@ export const getRepos = async (filter: { status?: RepoIndexingStatus[], connecti
                     }
                 }
             } : {}),
-            ...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
-                permittedUsers: {
-                    some: {
-                        userId: user?.id,
-                    }
-                }
-            } : {})
         },
         include: {
             connections: {
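With the filter applied centrally, call sites like `getRepos` above keep only the org scoping. Below is a sketch of how `withOptionalAuthV2` might hand out the scoped client; only the `{ org, prisma }` callback shape comes from this diff, and `createPermissionScopedPrisma` is the hypothetical helper sketched earlier, so treat the wiring as an assumption rather than the actual implementation in `withAuthV2.ts`.

```typescript
// Hypothetical wiring, assuming the permission-scoped client sketched above exists.
import { Org } from "@sourcebot/db";
import { prisma as basePrisma } from "@/prisma";
import { orgNotFound } from "@/lib/serviceError";
import { auth } from "./auth";
import { getOrgFromDomain } from "./data/org";
import { SINGLE_TENANT_ORG_DOMAIN } from "./lib/constants";
import { createPermissionScopedPrisma } from "./permissionScopedPrisma"; // hypothetical module

export const withOptionalAuthV2 = async <T>(
    fn: (ctx: { org: Org, prisma: ReturnType<typeof createPermissionScopedPrisma> }) => Promise<T>,
) => {
    // Anonymous access is allowed; the session (and therefore the user id) is optional.
    const session = await auth();
    const org = await getOrgFromDomain(SINGLE_TENANT_ORG_DOMAIN);
    if (!org) {
        return orgNotFound();
    }
    // Every query made through this client is automatically permission-scoped.
    const prisma = createPermissionScopedPrisma(basePrisma, session?.user?.id);
    return fn({ org, prisma });
};
```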
@@ -688,74 +680,65 @@ export const getRepos = async (filter: { status?: RepoIndexingStatus[], connecti
|
|||
}))
|
||||
}));
|
||||
|
||||
export const getRepoInfoByName = async (repoName: string, domain: string) => sew(() =>
|
||||
withAuth((userId) =>
|
||||
withOrgMembership(userId, domain, async ({ org }) => {
|
||||
// @note: repo names are represented by their remote url
|
||||
// on the code host. E.g.,:
|
||||
// - github.com/sourcebot-dev/sourcebot
|
||||
// - gitlab.com/gitlab-org/gitlab
|
||||
// - gerrit.wikimedia.org/r/mediawiki/extensions/OnionsPorFavor
|
||||
// etc.
|
||||
//
|
||||
// For most purposes, repo names are unique within an org, so using
|
||||
// findFirst is equivalent to findUnique. Duplicates _can_ occur when
|
||||
// a repository is specified by its remote url in a generic `git`
|
||||
// connection. For example:
|
||||
//
|
||||
// ```json
|
||||
// {
|
||||
// "connections": {
|
||||
// "connection-1": {
|
||||
// "type": "github",
|
||||
// "repos": [
|
||||
// "sourcebot-dev/sourcebot"
|
||||
// ]
|
||||
// },
|
||||
// "connection-2": {
|
||||
// "type": "git",
|
||||
// "url": "file:///tmp/repos/sourcebot"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// ```
|
||||
//
|
||||
// In this scenario, both repos will be named "github.com/sourcebot-dev/sourcebot".
|
||||
// We will leave this as an edge case for now since it's unlikely to happen in practice.
|
||||
//
|
||||
// @v4-todo: we could add a unique constraint on repo name + orgId to help de-duplicate
|
||||
// these cases.
|
||||
// @see: repoCompileUtils.ts
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId: userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
},
|
||||
});
|
||||
export const getRepoInfoByName = async (repoName: string) => sew(() =>
|
||||
withOptionalAuthV2(async ({ org, prisma }) => {
|
||||
// @note: repo names are represented by their remote url
|
||||
// on the code host. E.g.,:
|
||||
// - github.com/sourcebot-dev/sourcebot
|
||||
// - gitlab.com/gitlab-org/gitlab
|
||||
// - gerrit.wikimedia.org/r/mediawiki/extensions/OnionsPorFavor
|
||||
// etc.
|
||||
//
|
||||
// For most purposes, repo names are unique within an org, so using
|
||||
// findFirst is equivalent to findUnique. Duplicates _can_ occur when
|
||||
// a repository is specified by its remote url in a generic `git`
|
||||
// connection. For example:
|
||||
//
|
||||
// ```json
|
||||
// {
|
||||
// "connections": {
|
||||
// "connection-1": {
|
||||
// "type": "github",
|
||||
// "repos": [
|
||||
// "sourcebot-dev/sourcebot"
|
||||
// ]
|
||||
// },
|
||||
// "connection-2": {
|
||||
// "type": "git",
|
||||
// "url": "file:///tmp/repos/sourcebot"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// ```
|
||||
//
|
||||
// In this scenario, both repos will be named "github.com/sourcebot-dev/sourcebot".
|
||||
// We will leave this as an edge case for now since it's unlikely to happen in practice.
|
||||
//
|
||||
// @v4-todo: we could add a unique constraint on repo name + orgId to help de-duplicate
|
||||
// these cases.
|
||||
// @see: repoCompileUtils.ts
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
},
|
||||
});
|
||||
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
}
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
return {
|
||||
id: repo.id,
|
||||
name: repo.name,
|
||||
displayName: repo.displayName ?? undefined,
|
||||
codeHostType: repo.external_codeHostType,
|
||||
webUrl: repo.webUrl ?? undefined,
|
||||
imageUrl: repo.imageUrl ?? undefined,
|
||||
indexedAt: repo.indexedAt ?? undefined,
|
||||
repoIndexingStatus: repo.repoIndexingStatus,
|
||||
}
|
||||
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true
|
||||
));
|
||||
return {
|
||||
id: repo.id,
|
||||
name: repo.name,
|
||||
displayName: repo.displayName ?? undefined,
|
||||
codeHostType: repo.external_codeHostType,
|
||||
webUrl: repo.webUrl ?? undefined,
|
||||
imageUrl: repo.imageUrl ?? undefined,
|
||||
indexedAt: repo.indexedAt ?? undefined,
|
||||
repoIndexingStatus: repo.repoIndexingStatus,
|
||||
}
|
||||
}));
|
||||
|
||||
export const createConnection = async (name: string, type: CodeHostType, connectionConfig: string, domain: string): Promise<{ id: number } | ServiceError> => sew(() =>
|
||||
withAuth((userId) =>
|
||||
|
|
@@ -805,150 +788,141 @@ export const createConnection = async (name: string, type: CodeHostType, connect
|
|||
}, OrgRole.OWNER)
|
||||
));
|
||||
|
||||
export const experimental_addGithubRepositoryByUrl = async (repositoryUrl: string, domain: string): Promise<{ connectionId: number } | ServiceError> => sew(() =>
|
||||
withAuth((userId) =>
|
||||
withOrgMembership(userId, domain, async ({ org }) => {
|
||||
if (env.EXPERIMENT_SELF_SERVE_REPO_INDEXING_ENABLED !== 'true') {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: "This feature is not enabled.",
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
export const experimental_addGithubRepositoryByUrl = async (repositoryUrl: string): Promise<{ connectionId: number } | ServiceError> => sew(() =>
|
||||
withOptionalAuthV2(async ({ org, prisma }) => {
|
||||
if (env.EXPERIMENT_SELF_SERVE_REPO_INDEXING_ENABLED !== 'true') {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: "This feature is not enabled.",
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
// Parse repository URL to extract owner/repo
|
||||
const repoInfo = (() => {
|
||||
const url = repositoryUrl.trim();
|
||||
// Parse repository URL to extract owner/repo
|
||||
const repoInfo = (() => {
|
||||
const url = repositoryUrl.trim();
|
||||
|
||||
// Handle various GitHub URL formats
|
||||
const patterns = [
|
||||
// https://github.com/owner/repo or https://github.com/owner/repo.git
|
||||
/^https?:\/\/github\.com\/([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+?)(?:\.git)?\/?$/,
|
||||
// github.com/owner/repo
|
||||
/^github\.com\/([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+?)(?:\.git)?\/?$/,
|
||||
// owner/repo
|
||||
/^([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+)$/
|
||||
];
|
||||
// Handle various GitHub URL formats
|
||||
const patterns = [
|
||||
// https://github.com/owner/repo or https://github.com/owner/repo.git
|
||||
/^https?:\/\/github\.com\/([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+?)(?:\.git)?\/?$/,
|
||||
// github.com/owner/repo
|
||||
/^github\.com\/([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+?)(?:\.git)?\/?$/,
|
||||
// owner/repo
|
||||
/^([a-zA-Z0-9_.-]+)\/([a-zA-Z0-9_.-]+)$/
|
||||
];
|
||||
|
||||
for (const pattern of patterns) {
|
||||
const match = url.match(pattern);
|
||||
if (match) {
|
||||
return {
|
||||
owner: match[1],
|
||||
repo: match[2]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
})();
|
||||
|
||||
if (!repoInfo) {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: "Invalid repository URL format. Please use 'owner/repo' or 'https://github.com/owner/repo' format.",
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
const { owner, repo } = repoInfo;
|
||||
|
||||
// Use GitHub API to fetch repository information and get the external_id
|
||||
const octokit = new Octokit({
|
||||
auth: env.EXPERIMENT_SELF_SERVE_REPO_INDEXING_GITHUB_TOKEN
|
||||
});
|
||||
|
||||
let githubRepo;
|
||||
try {
|
||||
const response = await octokit.rest.repos.get({
|
||||
owner,
|
||||
repo,
|
||||
});
|
||||
githubRepo = response.data;
|
||||
} catch (error) {
|
||||
if (isHttpError(error, 404)) {
|
||||
for (const pattern of patterns) {
|
||||
const match = url.match(pattern);
|
||||
if (match) {
|
||||
return {
|
||||
statusCode: StatusCodes.NOT_FOUND,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: `Repository '${owner}/${repo}' not found or is private. Only public repositories can be added.`,
|
||||
} satisfies ServiceError;
|
||||
owner: match[1],
|
||||
repo: match[2]
|
||||
};
|
||||
}
|
||||
|
||||
if (isHttpError(error, 403)) {
|
||||
return {
|
||||
statusCode: StatusCodes.FORBIDDEN,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: `Access to repository '${owner}/${repo}' is forbidden. Only public repositories can be added.`,
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
return {
|
||||
statusCode: StatusCodes.INTERNAL_SERVER_ERROR,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: `Failed to fetch repository information: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
if (githubRepo.private) {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: "Only public repositories can be added.",
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
return null;
|
||||
})();
|
||||
|
||||
// Check if this repository is already connected using the external_id
|
||||
const existingRepo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
orgId: org.id,
|
||||
external_id: githubRepo.id.toString(),
|
||||
external_codeHostType: 'github',
|
||||
external_codeHostUrl: 'https://github.com',
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId: userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
}
|
||||
if (!repoInfo) {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: "Invalid repository URL format. Please use 'owner/repo' or 'https://github.com/owner/repo' format.",
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
const { owner, repo } = repoInfo;
|
||||
|
||||
// Use GitHub API to fetch repository information and get the external_id
|
||||
const octokit = new Octokit({
|
||||
auth: env.EXPERIMENT_SELF_SERVE_REPO_INDEXING_GITHUB_TOKEN
|
||||
});
|
||||
|
||||
let githubRepo;
|
||||
try {
|
||||
const response = await octokit.rest.repos.get({
|
||||
owner,
|
||||
repo,
|
||||
});
|
||||
|
||||
if (existingRepo) {
|
||||
githubRepo = response.data;
|
||||
} catch (error) {
|
||||
if (isHttpError(error, 404)) {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.CONNECTION_ALREADY_EXISTS,
|
||||
message: "This repository already exists.",
|
||||
statusCode: StatusCodes.NOT_FOUND,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: `Repository '${owner}/${repo}' not found or is private. Only public repositories can be added.`,
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
const connectionName = `${owner}-${repo}-${Date.now()}`;
|
||||
|
||||
// Create GitHub connection config
|
||||
const connectionConfig: GithubConnectionConfig = {
|
||||
type: "github" as const,
|
||||
repos: [`${owner}/${repo}`],
|
||||
...(env.EXPERIMENT_SELF_SERVE_REPO_INDEXING_GITHUB_TOKEN ? {
|
||||
token: {
|
||||
env: 'EXPERIMENT_SELF_SERVE_REPO_INDEXING_GITHUB_TOKEN'
|
||||
}
|
||||
} : {})
|
||||
};
|
||||
|
||||
const connection = await prisma.connection.create({
|
||||
data: {
|
||||
orgId: org.id,
|
||||
name: connectionName,
|
||||
config: connectionConfig as unknown as Prisma.InputJsonValue,
|
||||
connectionType: 'github',
|
||||
}
|
||||
});
|
||||
if (isHttpError(error, 403)) {
|
||||
return {
|
||||
statusCode: StatusCodes.FORBIDDEN,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: `Access to repository '${owner}/${repo}' is forbidden. Only public repositories can be added.`,
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
return {
|
||||
connectionId: connection.id,
|
||||
statusCode: StatusCodes.INTERNAL_SERVER_ERROR,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: `Failed to fetch repository information: ${error instanceof Error ? error.message : 'Unknown error'}`,
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
if (githubRepo.private) {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.INVALID_REQUEST_BODY,
|
||||
message: "Only public repositories can be added.",
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
// Check if this repository is already connected using the external_id
|
||||
const existingRepo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
orgId: org.id,
|
||||
external_id: githubRepo.id.toString(),
|
||||
external_codeHostType: 'github',
|
||||
external_codeHostUrl: 'https://github.com',
|
||||
}
|
||||
}, OrgRole.GUEST), /* allowAnonymousAccess = */ true
|
||||
));
|
||||
});
|
||||
|
||||
if (existingRepo) {
|
||||
return {
|
||||
statusCode: StatusCodes.BAD_REQUEST,
|
||||
errorCode: ErrorCode.CONNECTION_ALREADY_EXISTS,
|
||||
message: "This repository already exists.",
|
||||
} satisfies ServiceError;
|
||||
}
|
||||
|
||||
const connectionName = `${owner}-${repo}-${Date.now()}`;
|
||||
|
||||
// Create GitHub connection config
|
||||
const connectionConfig: GithubConnectionConfig = {
|
||||
type: "github" as const,
|
||||
repos: [`${owner}/${repo}`],
|
||||
...(env.EXPERIMENT_SELF_SERVE_REPO_INDEXING_GITHUB_TOKEN ? {
|
||||
token: {
|
||||
env: 'EXPERIMENT_SELF_SERVE_REPO_INDEXING_GITHUB_TOKEN'
|
||||
}
|
||||
} : {})
|
||||
};
|
||||
|
||||
const connection = await prisma.connection.create({
|
||||
data: {
|
||||
orgId: org.id,
|
||||
name: connectionName,
|
||||
config: connectionConfig as unknown as Prisma.InputJsonValue,
|
||||
connectionType: 'github',
|
||||
}
|
||||
});
|
||||
|
||||
return {
|
||||
connectionId: connection.id,
|
||||
}
|
||||
}));
|
||||
|
||||
export const updateConnectionDisplayName = async (connectionId: number, name: string, domain: string): Promise<{ success: boolean } | ServiceError> => sew(() =>
|
||||
withAuth((userId) =>
|
||||
|
|
@@ -2043,82 +2017,73 @@ export const getSearchContexts = async (domain: string) => sew(() =>
|
|||
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true
|
||||
));
|
||||
|
||||
export const getRepoImage = async (repoId: number, domain: string): Promise<ArrayBuffer | ServiceError> => sew(async () => {
|
||||
return await withAuth(async (userId) => {
|
||||
return await withOrgMembership(userId, domain, async ({ org }) => {
|
||||
const repo = await prisma.repo.findUnique({
|
||||
where: {
|
||||
id: repoId,
|
||||
orgId: org.id,
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId: userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
},
|
||||
include: {
|
||||
connections: {
|
||||
include: {
|
||||
connection: true,
|
||||
}
|
||||
export const getRepoImage = async (repoId: number): Promise<ArrayBuffer | ServiceError> => sew(async () => {
|
||||
return await withOptionalAuthV2(async ({ org, prisma }) => {
|
||||
const repo = await prisma.repo.findUnique({
|
||||
where: {
|
||||
id: repoId,
|
||||
orgId: org.id,
|
||||
},
|
||||
include: {
|
||||
connections: {
|
||||
include: {
|
||||
connection: true,
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
if (!repo || !repo.imageUrl) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
const authHeaders: Record<string, string> = {};
|
||||
for (const { connection } of repo.connections) {
|
||||
try {
|
||||
if (connection.connectionType === 'github') {
|
||||
const config = connection.config as unknown as GithubConnectionConfig;
|
||||
if (config.token) {
|
||||
const token = await getTokenFromConfig(config.token, connection.orgId, prisma);
|
||||
authHeaders['Authorization'] = `token ${token}`;
|
||||
break;
|
||||
}
|
||||
} else if (connection.connectionType === 'gitlab') {
|
||||
const config = connection.config as unknown as GitlabConnectionConfig;
|
||||
if (config.token) {
|
||||
const token = await getTokenFromConfig(config.token, connection.orgId, prisma);
|
||||
authHeaders['PRIVATE-TOKEN'] = token;
|
||||
break;
|
||||
}
|
||||
} else if (connection.connectionType === 'gitea') {
|
||||
const config = connection.config as unknown as GiteaConnectionConfig;
|
||||
if (config.token) {
|
||||
const token = await getTokenFromConfig(config.token, connection.orgId, prisma);
|
||||
authHeaders['Authorization'] = `token ${token}`;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to get token for connection ${connection.id}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(repo.imageUrl, {
|
||||
headers: authHeaders,
|
||||
});
|
||||
|
||||
if (!repo || !repo.imageUrl) {
|
||||
if (!response.ok) {
|
||||
logger.warn(`Failed to fetch image from ${repo.imageUrl}: ${response.status}`);
|
||||
return notFound();
|
||||
}
|
||||
|
||||
const authHeaders: Record<string, string> = {};
|
||||
for (const { connection } of repo.connections) {
|
||||
try {
|
||||
if (connection.connectionType === 'github') {
|
||||
const config = connection.config as unknown as GithubConnectionConfig;
|
||||
if (config.token) {
|
||||
const token = await getTokenFromConfig(config.token, connection.orgId, prisma);
|
||||
authHeaders['Authorization'] = `token ${token}`;
|
||||
break;
|
||||
}
|
||||
} else if (connection.connectionType === 'gitlab') {
|
||||
const config = connection.config as unknown as GitlabConnectionConfig;
|
||||
if (config.token) {
|
||||
const token = await getTokenFromConfig(config.token, connection.orgId, prisma);
|
||||
authHeaders['PRIVATE-TOKEN'] = token;
|
||||
break;
|
||||
}
|
||||
} else if (connection.connectionType === 'gitea') {
|
||||
const config = connection.config as unknown as GiteaConnectionConfig;
|
||||
if (config.token) {
|
||||
const token = await getTokenFromConfig(config.token, connection.orgId, prisma);
|
||||
authHeaders['Authorization'] = `token ${token}`;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
logger.warn(`Failed to get token for connection ${connection.id}:`, error);
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(repo.imageUrl, {
|
||||
headers: authHeaders,
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
logger.warn(`Failed to fetch image from ${repo.imageUrl}: ${response.status}`);
|
||||
return notFound();
|
||||
}
|
||||
|
||||
const imageBuffer = await response.arrayBuffer();
|
||||
return imageBuffer;
|
||||
} catch (error) {
|
||||
logger.error(`Error proxying image for repo ${repoId}:`, error);
|
||||
return notFound();
|
||||
}
|
||||
}, /* minRequiredRole = */ OrgRole.GUEST);
|
||||
}, /* allowAnonymousAccess = */ true);
|
||||
const imageBuffer = await response.arrayBuffer();
|
||||
return imageBuffer;
|
||||
} catch (error) {
|
||||
logger.error(`Error proxying image for repo ${repoId}:`, error);
|
||||
return notFound();
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
export const getAnonymousAccessStatus = async (domain: string): Promise<boolean | ServiceError> => sew(async () => {
|
||||
|
|
|
|||
|
|
@@ -20,7 +20,7 @@ export const CodePreviewPanel = async ({ path, repoName, revisionName, domain }:
             repository: repoName,
             branch: revisionName,
         }, domain),
-        getRepoInfoByName(repoName, domain),
+        getRepoInfoByName(repoName),
     ]);

     if (isServiceError(fileSourceResponse) || isServiceError(repoInfoResponse)) {
@@ -10,17 +10,16 @@ interface TreePreviewPanelProps {
     path: string;
     repoName: string;
     revisionName?: string;
-    domain: string;
 }

-export const TreePreviewPanel = async ({ path, repoName, revisionName, domain }: TreePreviewPanelProps) => {
+export const TreePreviewPanel = async ({ path, repoName, revisionName }: TreePreviewPanelProps) => {
     const [repoInfoResponse, folderContentsResponse] = await Promise.all([
-        getRepoInfoByName(repoName, domain),
+        getRepoInfoByName(repoName),
         getFolderContents({
             repoName,
             revisionName: revisionName ?? 'HEAD',
             path,
-        }, domain)
+        })
     ]);

     if (isServiceError(folderContentsResponse) || isServiceError(repoInfoResponse)) {
@@ -42,7 +42,6 @@ export default async function BrowsePage(props: BrowsePageProps) {
                         path={path}
                         repoName={repoName}
                         revisionName={revisionName}
-                        domain={domain}
                     />
                 )}
             </Suspense>
@@ -6,7 +6,6 @@ import { useHotkeys } from "react-hotkeys-hook";
 import { useQuery } from "@tanstack/react-query";
 import { unwrapServiceError } from "@/lib/utils";
 import { FileTreeItem, getFiles } from "@/features/fileTree/actions";
-import { useDomain } from "@/hooks/useDomain";
 import { Dialog, DialogContent, DialogDescription, DialogTitle } from "@/components/ui/dialog";
 import { useBrowseNavigation } from "../hooks/useBrowseNavigation";
 import { useBrowseState } from "../hooks/useBrowseState";
@@ -28,7 +27,6 @@ type SearchResult = {

 export const FileSearchCommandDialog = () => {
     const { repoName, revisionName } = useBrowseParams();
-    const domain = useDomain();
     const { state: { isFileSearchOpen }, updateBrowseState } = useBrowseState();

     const commandListRef = useRef<HTMLDivElement>(null);
@@ -57,8 +55,8 @@ export const FileSearchCommandDialog = () => {
     }, [isFileSearchOpen]);

     const { data: files, isLoading, isError } = useQuery({
-        queryKey: ['files', repoName, revisionName, domain],
-        queryFn: () => unwrapServiceError(getFiles({ repoName, revisionName: revisionName ?? 'HEAD' }, domain)),
+        queryKey: ['files', repoName, revisionName],
+        queryFn: () => unwrapServiceError(getFiles({ repoName, revisionName: revisionName ?? 'HEAD' })),
         enabled: isFileSearchOpen,
     });

@@ -8,7 +8,6 @@ import { zodResolver } from "@hookform/resolvers/zod";
 import { useForm } from "react-hook-form";
 import { z } from "zod";
 import { experimental_addGithubRepositoryByUrl } from "@/actions";
-import { useDomain } from "@/hooks/useDomain";
 import { isServiceError } from "@/lib/utils";
 import { useToast } from "@/components/hooks/use-toast";
 import { useRouter } from "next/navigation";
@@ -37,7 +36,6 @@ const formSchema = z.object({
 });

 export const AddRepositoryDialog = ({ isOpen, onOpenChange }: AddRepositoryDialogProps) => {
-    const domain = useDomain();
     const { toast } = useToast();
     const router = useRouter();

@@ -52,7 +50,7 @@ export const AddRepositoryDialog = ({ isOpen, onOpenChange }: AddRepositoryDialo

     const onSubmit = async (data: z.infer<typeof formSchema>) => {

-        const result = await experimental_addGithubRepositoryByUrl(data.repositoryUrl.trim(), domain);
+        const result = await experimental_addGithubRepositoryByUrl(data.repositoryUrl.trim());
         if (isServiceError(result)) {
             toast({
                 title: "Error adding repository",
@@ -3,18 +3,18 @@ import { isServiceError } from "@/lib/utils";
 import { NextRequest } from "next/server";

 export async function GET(
-    request: NextRequest,
+    _request: NextRequest,
     props: { params: Promise<{ domain: string; repoId: string }> }
 ) {
     const params = await props.params;
-    const { domain, repoId } = params;
+    const { repoId } = params;
     const repoIdNum = parseInt(repoId);

     if (isNaN(repoIdNum)) {
         return new Response("Invalid repo ID", { status: 400 });
     }

-    const result = await getRepoImage(repoIdNum, domain);
+    const result = await getRepoImage(repoIdNum);
     if (isServiceError(result)) {
         return new Response(result.message, { status: result.statusCode });
     }
@@ -1,13 +1,13 @@
|
|||
'use server';
|
||||
|
||||
import { sew, withAuth, withOrgMembership } from '@/actions';
|
||||
import { sew } from '@/actions';
|
||||
import { env } from '@/env.mjs';
|
||||
import { OrgRole, Repo } from '@sourcebot/db';
|
||||
import { prisma } from '@/prisma';
|
||||
import { notFound, unexpectedError } from '@/lib/serviceError';
|
||||
import { simpleGit } from 'simple-git';
|
||||
import path from 'path';
|
||||
import { withOptionalAuthV2 } from '@/withAuthV2';
|
||||
import { Repo } from '@sourcebot/db';
|
||||
import { createLogger } from '@sourcebot/logger';
|
||||
import path from 'path';
|
||||
import { simpleGit } from 'simple-git';
|
||||
|
||||
const logger = createLogger('file-tree');
|
||||
|
||||
|
|
@@ -25,209 +25,182 @@ export type FileTreeNode = FileTreeItem & {
|
|||
* Returns the tree of files (blobs) and directories (trees) for a given repository,
|
||||
* at a given revision.
|
||||
*/
|
||||
export const getTree = async (params: { repoName: string, revisionName: string }, domain: string) => sew(() =>
|
||||
withAuth((userId) =>
|
||||
withOrgMembership(userId, domain, async ({ org }) => {
|
||||
const { repoName, revisionName } = params;
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId: userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
},
|
||||
});
|
||||
export const getTree = async (params: { repoName: string, revisionName: string }) => sew(() =>
|
||||
withOptionalAuthV2(async ({ org, prisma }) => {
|
||||
const { repoName, revisionName } = params;
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
},
|
||||
});
|
||||
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
}
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
const { path: repoPath } = getRepoPath(repo);
|
||||
const { path: repoPath } = getRepoPath(repo);
|
||||
|
||||
const git = simpleGit().cwd(repoPath);
|
||||
const git = simpleGit().cwd(repoPath);
|
||||
|
||||
let result: string;
|
||||
try {
|
||||
result = await git.raw([
|
||||
'ls-tree',
|
||||
revisionName,
|
||||
// recursive
|
||||
'-r',
|
||||
// include trees when recursing
|
||||
'-t',
|
||||
// format as output as {type},{path}
|
||||
'--format=%(objecttype),%(path)',
|
||||
]);
|
||||
} catch (error) {
|
||||
logger.error('git ls-tree failed.', { error });
|
||||
return unexpectedError('git ls-tree command failed.');
|
||||
}
|
||||
let result: string;
|
||||
try {
|
||||
result = await git.raw([
|
||||
'ls-tree',
|
||||
revisionName,
|
||||
// recursive
|
||||
'-r',
|
||||
// include trees when recursing
|
||||
'-t',
|
||||
// format as output as {type},{path}
|
||||
'--format=%(objecttype),%(path)',
|
||||
]);
|
||||
} catch (error) {
|
||||
logger.error('git ls-tree failed.', { error });
|
||||
return unexpectedError('git ls-tree command failed.');
|
||||
}
|
||||
|
||||
const lines = result.split('\n').filter(line => line.trim());
|
||||
|
||||
const flatList = lines.map(line => {
|
||||
const [type, path] = line.split(',');
|
||||
return {
|
||||
type,
|
||||
path,
|
||||
}
|
||||
});
|
||||
|
||||
const tree = buildFileTree(flatList);
|
||||
const lines = result.split('\n').filter(line => line.trim());
|
||||
|
||||
const flatList = lines.map(line => {
|
||||
const [type, path] = line.split(',');
|
||||
return {
|
||||
tree,
|
||||
type,
|
||||
path,
|
||||
}
|
||||
});
|
||||
|
||||
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true)
|
||||
);
|
||||
const tree = buildFileTree(flatList);
|
||||
|
||||
return {
|
||||
tree,
|
||||
}
|
||||
|
||||
}));
|
||||
|
||||
/**
|
||||
* Returns the contents of a folder at a given path in a given repository,
|
||||
* at a given revision.
|
||||
*/
|
||||
export const getFolderContents = async (params: { repoName: string, revisionName: string, path: string }, domain: string) => sew(() =>
|
||||
withAuth((userId) =>
|
||||
withOrgMembership(userId, domain, async ({ org }) => {
|
||||
const { repoName, revisionName, path } = params;
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId: userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
},
|
||||
});
|
||||
export const getFolderContents = async (params: { repoName: string, revisionName: string, path: string }) => sew(() =>
|
||||
withOptionalAuthV2(async ({ org, prisma }) => {
|
||||
const { repoName, revisionName, path } = params;
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
},
|
||||
});
|
||||
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
const { path: repoPath } = getRepoPath(repo);
|
||||
|
||||
// @note: we don't allow directory traversal
|
||||
// or null bytes in the path.
|
||||
if (path.includes('..') || path.includes('\0')) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
// Normalize the path by...
|
||||
let normalizedPath = path;
|
||||
|
||||
// ... adding a trailing slash if it doesn't have one.
|
||||
// This is important since ls-tree won't return the contents
|
||||
// of a directory if it doesn't have a trailing slash.
|
||||
if (!normalizedPath.endsWith('/')) {
|
||||
normalizedPath = `${normalizedPath}/`;
|
||||
}
|
||||
|
||||
// ... removing any leading slashes. This is needed since
|
||||
// the path is relative to the repository's root, so we
|
||||
// need a relative path.
|
||||
if (normalizedPath.startsWith('/')) {
|
||||
normalizedPath = normalizedPath.slice(1);
|
||||
}
|
||||
|
||||
const git = simpleGit().cwd(repoPath);
|
||||
|
||||
let result: string;
|
||||
try {
|
||||
result = await git.raw([
|
||||
'ls-tree',
|
||||
revisionName,
|
||||
// format as output as {type},{path}
|
||||
'--format=%(objecttype),%(path)',
|
||||
...(normalizedPath.length === 0 ? [] : [normalizedPath]),
|
||||
]);
|
||||
} catch (error) {
|
||||
logger.error('git ls-tree failed.', { error });
|
||||
return unexpectedError('git ls-tree command failed.');
|
||||
}
|
||||
|
||||
const lines = result.split('\n').filter(line => line.trim());
|
||||
|
||||
const contents: FileTreeItem[] = lines.map(line => {
|
||||
const [type, path] = line.split(',');
|
||||
const name = path.split('/').pop() ?? '';
|
||||
|
||||
return {
|
||||
type,
|
||||
path,
|
||||
name,
|
||||
}
|
||||
});
|
||||
|
||||
const { path: repoPath } = getRepoPath(repo);
|
||||
return contents;
|
||||
}));
|
||||
|
||||
// @note: we don't allow directory traversal
|
||||
// or null bytes in the path.
|
||||
if (path.includes('..') || path.includes('\0')) {
|
||||
return notFound();
|
||||
export const getFiles = async (params: { repoName: string, revisionName: string }) => sew(() =>
|
||||
withOptionalAuthV2(async ({ org, prisma }) => {
|
||||
const { repoName, revisionName } = params;
|
||||
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
},
|
||||
});
|
||||
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
const { path: repoPath } = getRepoPath(repo);
|
||||
|
||||
const git = simpleGit().cwd(repoPath);
|
||||
|
||||
let result: string;
|
||||
try {
|
||||
result = await git.raw([
|
||||
'ls-tree',
|
||||
revisionName,
|
||||
// recursive
|
||||
'-r',
|
||||
// only return the names of the files
|
||||
'--name-only',
|
||||
]);
|
||||
} catch (error) {
|
||||
logger.error('git ls-tree failed.', { error });
|
||||
return unexpectedError('git ls-tree command failed.');
|
||||
}
|
||||
|
||||
const paths = result.split('\n').filter(line => line.trim());
|
||||
|
||||
const files: FileTreeItem[] = paths.map(path => {
|
||||
const name = path.split('/').pop() ?? '';
|
||||
return {
|
||||
type: 'blob',
|
||||
path,
|
||||
name,
|
||||
}
|
||||
});
|
||||
|
||||
// Normalize the path by...
|
||||
let normalizedPath = path;
|
||||
return files;
|
||||
|
||||
// ... adding a trailing slash if it doesn't have one.
|
||||
// This is important since ls-tree won't return the contents
|
||||
// of a directory if it doesn't have a trailing slash.
|
||||
if (!normalizedPath.endsWith('/')) {
|
||||
normalizedPath = `${normalizedPath}/`;
|
||||
}
|
||||
|
||||
// ... removing any leading slashes. This is needed since
|
||||
// the path is relative to the repository's root, so we
|
||||
// need a relative path.
|
||||
if (normalizedPath.startsWith('/')) {
|
||||
normalizedPath = normalizedPath.slice(1);
|
||||
}
|
||||
|
||||
const git = simpleGit().cwd(repoPath);
|
||||
|
||||
let result: string;
|
||||
try {
|
||||
result = await git.raw([
|
||||
'ls-tree',
|
||||
revisionName,
|
||||
// format as output as {type},{path}
|
||||
'--format=%(objecttype),%(path)',
|
||||
...(normalizedPath.length === 0 ? [] : [normalizedPath]),
|
||||
]);
|
||||
} catch (error) {
|
||||
logger.error('git ls-tree failed.', { error });
|
||||
return unexpectedError('git ls-tree command failed.');
|
||||
}
|
||||
|
||||
const lines = result.split('\n').filter(line => line.trim());
|
||||
|
||||
const contents: FileTreeItem[] = lines.map(line => {
|
||||
const [type, path] = line.split(',');
|
||||
const name = path.split('/').pop() ?? '';
|
||||
|
||||
return {
|
||||
type,
|
||||
path,
|
||||
name,
|
||||
}
|
||||
});
|
||||
|
||||
return contents;
|
||||
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true)
|
||||
);
|
||||
|
||||
export const getFiles = async (params: { repoName: string, revisionName: string }, domain: string) => sew(() =>
|
||||
withAuth((userId) =>
|
||||
withOrgMembership(userId, domain, async ({ org }) => {
|
||||
const { repoName, revisionName } = params;
|
||||
|
||||
const repo = await prisma.repo.findFirst({
|
||||
where: {
|
||||
name: repoName,
|
||||
orgId: org.id,
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId: userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
},
|
||||
});
|
||||
|
||||
if (!repo) {
|
||||
return notFound();
|
||||
}
|
||||
|
||||
const { path: repoPath } = getRepoPath(repo);
|
||||
|
||||
const git = simpleGit().cwd(repoPath);
|
||||
|
||||
let result: string;
|
||||
try {
|
||||
result = await git.raw([
|
||||
'ls-tree',
|
||||
revisionName,
|
||||
// recursive
|
||||
'-r',
|
||||
// only return the names of the files
|
||||
'--name-only',
|
||||
]);
|
||||
} catch (error) {
|
||||
logger.error('git ls-tree failed.', { error });
|
||||
return unexpectedError('git ls-tree command failed.');
|
||||
}
|
||||
|
||||
const paths = result.split('\n').filter(line => line.trim());
|
||||
|
||||
const files: FileTreeItem[] = paths.map(path => {
|
||||
const name = path.split('/').pop() ?? '';
|
||||
return {
|
||||
type: 'blob',
|
||||
path,
|
||||
name,
|
||||
}
|
||||
});
|
||||
|
||||
return files;
|
||||
|
||||
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true)
|
||||
);
|
||||
}));
|
||||
|
||||
const buildFileTree = (flatList: { type: string, path: string }[]): FileTreeNode => {
|
||||
const root: FileTreeNode = {
|
||||
|
|
|
|||
|
|
@@ -3,7 +3,6 @@
 import { getTree } from "../actions";
 import { useQuery } from "@tanstack/react-query";
 import { unwrapServiceError } from "@/lib/utils";
-import { useDomain } from "@/hooks/useDomain";
 import { ResizablePanel } from "@/components/ui/resizable";
 import { Skeleton } from "@/components/ui/skeleton";
 import { useBrowseState } from "@/app/[domain]/browse/hooks/useBrowseState";
@@ -41,17 +40,16 @@ export const FileTreePanel = ({ order }: FileTreePanelProps) => {
         updateBrowseState,
     } = useBrowseState();

-    const domain = useDomain();
     const { repoName, revisionName, path } = useBrowseParams();

     const fileTreePanelRef = useRef<ImperativePanelHandle>(null);
     const { data, isPending, isError } = useQuery({
-        queryKey: ['tree', repoName, revisionName, domain],
+        queryKey: ['tree', repoName, revisionName],
         queryFn: () => unwrapServiceError(
             getTree({
                 repoName,
                 revisionName: revisionName ?? 'HEAD',
-            }, domain)
+            })
         ),
     });

@@ -4,14 +4,14 @@ import { env } from "@/env.mjs";
 import { invalidZoektResponse, ServiceError } from "../../lib/serviceError";
 import { isServiceError } from "../../lib/utils";
 import { zoektFetch } from "./zoektClient";
-import { prisma } from "@/prisma";
 import { ErrorCode } from "../../lib/errorCodes";
 import { StatusCodes } from "http-status-codes";
 import { zoektSearchResponseSchema } from "./zoektSchema";
 import { SearchRequest, SearchResponse, SourceRange } from "./types";
-import { OrgRole, Repo } from "@sourcebot/db";
-import { sew, withAuth, withOrgMembership } from "@/actions";
+import { PrismaClient, Repo } from "@sourcebot/db";
+import { sew } from "@/actions";
 import { base64Decode } from "@sourcebot/shared";
+import { withOptionalAuthV2 } from "@/withAuthV2";

 // List of supported query prefixes in zoekt.
 // @see : https://github.com/sourcebot-dev/zoekt/blob/main/query/parse.go#L417
@@ -36,7 +36,7 @@ enum zoektPrefixes {
     reposet = "reposet:",
 }

-const transformZoektQuery = async (query: string, orgId: number): Promise<string | ServiceError> => {
+const transformZoektQuery = async (query: string, orgId: number, prisma: PrismaClient): Promise<string | ServiceError> => {
     const prevQueryParts = query.split(" ");
     const newQueryParts = [];

@@ -127,235 +127,219 @@ const getFileWebUrl = (template: string, branch: string, fileName: string): stri
|
|||
return encodeURI(url + optionalQueryParams);
|
||||
}
|
||||
|
||||
export const search = async ({ query, matches, contextLines, whole }: SearchRequest, domain: string, apiKey: string | undefined = undefined) => sew(() =>
|
||||
withAuth((userId, _apiKeyHash) =>
|
||||
withOrgMembership(userId, domain, async ({ org }) => {
|
||||
const transformedQuery = await transformZoektQuery(query, org.id);
|
||||
if (isServiceError(transformedQuery)) {
|
||||
return transformedQuery;
|
||||
export const search = async ({ query, matches, contextLines, whole }: SearchRequest) => sew(() =>
|
||||
withOptionalAuthV2(async ({ org, prisma }) => {
|
||||
const transformedQuery = await transformZoektQuery(query, org.id, prisma);
|
||||
if (isServiceError(transformedQuery)) {
|
||||
return transformedQuery;
|
||||
}
|
||||
query = transformedQuery;
|
||||
|
||||
const isBranchFilteringEnabled = (
|
||||
query.includes(zoektPrefixes.branch) ||
|
||||
query.includes(zoektPrefixes.branchShort)
|
||||
);
|
||||
|
||||
// We only want to show matches for the default branch when
|
||||
// the user isn't explicitly filtering by branch.
|
||||
if (!isBranchFilteringEnabled) {
|
||||
query = query.concat(` branch:HEAD`);
|
||||
}
|
||||
|
||||
const body = JSON.stringify({
|
||||
q: query,
|
||||
// @see: https://github.com/sourcebot-dev/zoekt/blob/main/api.go#L892
|
||||
opts: {
|
||||
ChunkMatches: true,
|
||||
MaxMatchDisplayCount: matches,
|
||||
NumContextLines: contextLines,
|
||||
Whole: !!whole,
|
||||
TotalMaxMatchCount: env.TOTAL_MAX_MATCH_COUNT,
|
||||
ShardMaxMatchCount: env.SHARD_MAX_MATCH_COUNT,
|
||||
MaxWallTime: env.ZOEKT_MAX_WALL_TIME_MS * 1000 * 1000, // zoekt expects a duration in nanoseconds
|
||||
}
|
||||
query = transformedQuery;
|
||||
});
|
||||
|
||||
const isBranchFilteringEnabled = (
|
||||
query.includes(zoektPrefixes.branch) ||
|
||||
query.includes(zoektPrefixes.branchShort)
|
||||
);
|
||||
let header: Record<string, string> = {};
|
||||
header = {
|
||||
"X-Tenant-ID": org.id.toString()
|
||||
};
|
||||
|
||||
// We only want to show matches for the default branch when
|
||||
// the user isn't explicitly filtering by branch.
|
||||
if (!isBranchFilteringEnabled) {
|
||||
query = query.concat(` branch:HEAD`);
|
||||
}
|
||||
const searchResponse = await zoektFetch({
|
||||
path: "/api/search",
|
||||
body,
|
||||
header,
|
||||
method: "POST",
|
||||
});
|
||||
|
||||
const body = JSON.stringify({
|
||||
q: query,
|
||||
// @see: https://github.com/sourcebot-dev/zoekt/blob/main/api.go#L892
|
||||
opts: {
|
||||
ChunkMatches: true,
|
||||
MaxMatchDisplayCount: matches,
|
||||
NumContextLines: contextLines,
|
||||
Whole: !!whole,
|
||||
TotalMaxMatchCount: env.TOTAL_MAX_MATCH_COUNT,
|
||||
ShardMaxMatchCount: env.SHARD_MAX_MATCH_COUNT,
|
||||
MaxWallTime: env.ZOEKT_MAX_WALL_TIME_MS * 1000 * 1000, // zoekt expects a duration in nanoseconds
|
||||
if (!searchResponse.ok) {
|
||||
return invalidZoektResponse(searchResponse);
|
||||
}
|
||||
|
||||
const searchBody = await searchResponse.json();
|
||||
|
||||
const parser = zoektSearchResponseSchema.transform(async ({ Result }) => {
|
||||
// @note (2025-05-12): in zoekt, repositories are identified by the `RepositoryID` field
|
||||
// which corresponds to the `id` in the Repo table. In order to efficiently fetch repository
|
||||
// metadata when transforming (potentially thousands) of file matches, we aggregate a unique
|
||||
// set of repository ids* and map them to their corresponding Repo record.
|
||||
//
|
||||
// *Q: Why is `RepositoryID` optional? And why are we falling back to `Repository`?
|
||||
// A: Prior to this change, the repository id was not plumbed into zoekt, so RepositoryID was
|
||||
// always undefined. To make this a non-breaking change, we fallback to using the repository's name
|
||||
// (`Repository`) as the identifier in these cases. This is not guaranteed to be unique, but in
|
||||
// practice it is since the repository name includes the host and path (e.g., 'github.com/org/repo',
|
||||
// 'gitea.com/org/repo', etc.).
|
||||
//
|
||||
// Note: When a repository is re-indexed (every hour) this ID will be populated.
|
||||
// @see: https://github.com/sourcebot-dev/zoekt/pull/6
|
||||
const repoIdentifiers = new Set(Result.Files?.map((file) => file.RepositoryID ?? file.Repository) ?? []);
|
||||
const repos = new Map<string | number, Repo>();
|
||||
|
||||
(await prisma.repo.findMany({
|
||||
where: {
|
||||
id: {
|
||||
in: Array.from(repoIdentifiers).filter((id) => typeof id === "number"),
|
||||
},
|
||||
orgId: org.id,
|
||||
}
|
||||
});
|
||||
})).forEach(repo => repos.set(repo.id, repo));
|
||||
|
||||
let header: Record<string, string> = {};
|
||||
header = {
|
||||
"X-Tenant-ID": org.id.toString()
|
||||
};
|
||||
(await prisma.repo.findMany({
|
||||
where: {
|
||||
name: {
|
||||
in: Array.from(repoIdentifiers).filter((id) => typeof id === "string"),
|
||||
},
|
||||
orgId: org.id,
|
||||
}
|
||||
})).forEach(repo => repos.set(repo.name, repo));
|
||||
|
||||
const searchResponse = await zoektFetch({
|
||||
path: "/api/search",
|
||||
body,
|
||||
header,
|
||||
method: "POST",
|
||||
});
|
||||
const files = Result.Files?.map((file) => {
|
||||
const fileNameChunks = file.ChunkMatches.filter((chunk) => chunk.FileName);
|
||||
|
||||
if (!searchResponse.ok) {
|
||||
return invalidZoektResponse(searchResponse);
|
||||
}
|
||||
|
||||
const searchBody = await searchResponse.json();
|
||||
|
||||
const parser = zoektSearchResponseSchema.transform(async ({ Result }) => {
|
||||
// @note (2025-05-12): in zoekt, repositories are identified by the `RepositoryID` field
|
||||
// which corresponds to the `id` in the Repo table. In order to efficiently fetch repository
|
||||
// metadata when transforming (potentially thousands) of file matches, we aggregate a unique
|
||||
// set of repository ids* and map them to their corresponding Repo record.
|
||||
//
|
||||
// *Q: Why is `RepositoryID` optional? And why are we falling back to `Repository`?
|
||||
// A: Prior to this change, the repository id was not plumbed into zoekt, so RepositoryID was
|
||||
// always undefined. To make this a non-breaking change, we fallback to using the repository's name
|
||||
// (`Repository`) as the identifier in these cases. This is not guaranteed to be unique, but in
|
||||
// practice it is since the repository name includes the host and path (e.g., 'github.com/org/repo',
|
||||
// 'gitea.com/org/repo', etc.).
|
||||
//
|
||||
// Note: When a repository is re-indexed (every hour) this ID will be populated.
|
||||
// @see: https://github.com/sourcebot-dev/zoekt/pull/6
|
||||
const repoIdentifiers = new Set(Result.Files?.map((file) => file.RepositoryID ?? file.Repository) ?? []);
|
||||
const repos = new Map<string | number, Repo>();
|
||||
|
||||
(await prisma.repo.findMany({
|
||||
where: {
|
||||
id: {
|
||||
in: Array.from(repoIdentifiers).filter((id) => typeof id === "number"),
|
||||
},
|
||||
orgId: org.id,
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
}
|
||||
})).forEach(repo => repos.set(repo.id, repo));
|
||||
|
||||
(await prisma.repo.findMany({
|
||||
where: {
|
||||
name: {
|
||||
in: Array.from(repoIdentifiers).filter((id) => typeof id === "string"),
|
||||
},
|
||||
orgId: org.id,
|
||||
...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
|
||||
permittedUsers: {
|
||||
some: {
|
||||
userId,
|
||||
}
|
||||
}
|
||||
} : {})
|
||||
}
|
||||
})).forEach(repo => repos.set(repo.name, repo));
|
||||
|
||||
const files = Result.Files?.map((file) => {
|
||||
const fileNameChunks = file.ChunkMatches.filter((chunk) => chunk.FileName);
|
||||
|
||||
const webUrl = (() => {
|
||||
const template: string | undefined = Result.RepoURLs[file.Repository];
|
||||
if (!template) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// If there are multiple branches pointing to the same revision of this file, it doesn't
|
||||
// matter which branch we use here, so use the first one.
|
||||
const branch = file.Branches && file.Branches.length > 0 ? file.Branches[0] : "HEAD";
|
||||
return getFileWebUrl(template, branch, file.FileName);
|
||||
})();
|
||||
|
||||
const identifier = file.RepositoryID ?? file.Repository;
|
||||
        const repo = repos.get(identifier);

        // This can happen if the user doesn't have access to the repository.
        if (!repo) {
            return undefined;
        }

        const webUrl = (() => {
            const template: string | undefined = Result.RepoURLs[file.Repository];
            if (!template) {
                return undefined;
            }

            // If there are multiple branches pointing to the same revision of this file, it doesn't
            // matter which branch we use here, so use the first one.
            const branch = file.Branches && file.Branches.length > 0 ? file.Branches[0] : "HEAD";
            return getFileWebUrl(template, branch, file.FileName);
        })();

        return {
            fileName: {
                text: file.FileName,
                matchRanges: fileNameChunks.length === 1 ? fileNameChunks[0].Ranges.map((range) => ({
                    start: {
                        byteOffset: range.Start.ByteOffset,
                        column: range.Start.Column,
                        lineNumber: range.Start.LineNumber,
                    },
                    end: {
                        byteOffset: range.End.ByteOffset,
                        column: range.End.Column,
                        lineNumber: range.End.LineNumber,
                    }
                })) : [],
            },
            repository: repo.name,
            repositoryId: repo.id,
            webUrl: webUrl,
            language: file.Language,
            chunks: file.ChunkMatches
                .filter((chunk) => !chunk.FileName) // Filter out filename chunks.
                .map((chunk) => {
                    return {
                        content: base64Decode(chunk.Content),
                        matchRanges: chunk.Ranges.map((range) => ({
                            start: {
                                byteOffset: range.Start.ByteOffset,
                                column: range.Start.Column,
                                lineNumber: range.Start.LineNumber,
                            },
                            end: {
                                byteOffset: range.End.ByteOffset,
                                column: range.End.Column,
                                lineNumber: range.End.LineNumber,
                            }
                        }) satisfies SourceRange),
                        contentStart: {
                            byteOffset: chunk.ContentStart.ByteOffset,
                            column: chunk.ContentStart.Column,
                            lineNumber: chunk.ContentStart.LineNumber,
                        },
                        symbols: chunk.SymbolInfo?.map((symbol) => {
                            return {
                                symbol: symbol.Sym,
                                kind: symbol.Kind,
                                parent: symbol.Parent.length > 0 ? {
                                    symbol: symbol.Parent,
                                    kind: symbol.ParentKind,
                                } : undefined,
                            }
                        }) ?? undefined,
                    }
                }),
            branches: file.Branches,
            content: file.Content ? base64Decode(file.Content) : undefined,
        }
    }).filter((file) => file !== undefined) ?? [];

    return {
        zoektStats: {
            duration: Result.Duration,
            fileCount: Result.FileCount,
            matchCount: Result.MatchCount,
            filesSkipped: Result.FilesSkipped,
            contentBytesLoaded: Result.ContentBytesLoaded,
            indexBytesLoaded: Result.IndexBytesLoaded,
            crashes: Result.Crashes,
            shardFilesConsidered: Result.ShardFilesConsidered,
            filesConsidered: Result.FilesConsidered,
            filesLoaded: Result.FilesLoaded,
            shardsScanned: Result.ShardsScanned,
            shardsSkipped: Result.ShardsSkipped,
            shardsSkippedFilter: Result.ShardsSkippedFilter,
            ngramMatches: Result.NgramMatches,
            ngramLookups: Result.NgramLookups,
            wait: Result.Wait,
            matchTreeConstruction: Result.MatchTreeConstruction,
            matchTreeSearch: Result.MatchTreeSearch,
            regexpsConsidered: Result.RegexpsConsidered,
            flushReason: Result.FlushReason,
        },
        files,
        repositoryInfo: Array.from(repos.values()).map((repo) => ({
            id: repo.id,
            codeHostType: repo.external_codeHostType,
            name: repo.name,
            displayName: repo.displayName ?? undefined,
            webUrl: repo.webUrl ?? undefined,
        })),
        isBranchFilteringEnabled: isBranchFilteringEnabled,
        stats: {
            matchCount: files.reduce(
                (acc, file) =>
                    acc + file.chunks.reduce(
                        (acc, chunk) => acc + chunk.matchRanges.length,
                        0,
                    ),
                0,
            )
        }
    } satisfies SearchResponse;
});

    return parser.parseAsync(searchBody);
}, /* minRequiredRole = */ OrgRole.GUEST), /* allowAnonymousAccess = */ true, apiKey ? { apiKey, domain } : undefined)
);
}));
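Note: a minimal, self-contained sketch (simplified types; not the actual helpers in this file) of why the `if (!repo)` guard above matters once repo queries are permission-scoped. The `repos` map is assumed to be built from a permission-scoped query, so Zoekt matches from any repository missing in the map are dropped from the response.

// Hypothetical illustration only; types and names are simplified.
type Repo = { id: number; name: string };
type ZoektFile = { RepositoryID: number; FileName: string };

const buildVisibleFiles = (permittedRepos: Repo[], zoektFiles: ZoektFile[]) => {
    // Assumed to contain only repositories the current user may access.
    const repos = new Map(permittedRepos.map((repo) => [repo.id, repo]));

    return zoektFiles
        .map((file) => {
            const repo = repos.get(file.RepositoryID);
            // Mirrors the `if (!repo) return undefined` guard above.
            if (!repo) {
                return undefined;
            }
            return { repository: repo.name, fileName: file.FileName };
        })
        .filter((file) => file !== undefined);
};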
|
||||
|
|
|
|||
|
|
@@ -1,7 +1,48 @@
import 'server-only';
import { PrismaClient } from "@sourcebot/db";
import { env } from "@/env.mjs";
import { Prisma, PrismaClient } from "@sourcebot/db";

// @see: https://authjs.dev/getting-started/adapters/prisma
const globalForPrisma = globalThis as unknown as { prisma: PrismaClient }

// @NOTE: In almost all cases, the userScopedPrismaClientExtension should be used
// (since actions & queries are scoped to a particular user). There are some exceptions
// (e.g., in initialize.ts).
//
// @todo: we can mark this as `__unsafePrisma` in the future once we've migrated
// all of the actions & queries to use the userScopedPrismaClientExtension to avoid
// accidental misuse.
export const prisma = globalForPrisma.prisma || new PrismaClient()
if (process.env.NODE_ENV !== "production") globalForPrisma.prisma = prisma
if (env.NODE_ENV !== "production") globalForPrisma.prisma = prisma

/**
 * Creates a prisma client extension that scopes queries strictly to information
 * a given user should be able to access.
 */
export const userScopedPrismaClientExtension = (userId?: string) => {
    return Prisma.defineExtension(
        (prisma) => {
            return prisma.$extends({
                query: {
                    ...(env.EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true' ? {
                        repo: {
                            $allOperations({ args, query }) {
                                if ('where' in args) {
                                    args.where = {
                                        ...args.where,
                                        permittedUsers: {
                                            some: {
                                                userId,
                                            }
                                        }
                                    }
                                }

                                return query(args);
                            }
                        }
                    } : {})
                }
            })
        })
}
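Note: a rough usage sketch of the extension above, assuming the `permittedUsers` relation referenced in the extension body; the user id and org id below are placeholders, not values from this change.

// Sketch only; not part of this change.
import { PrismaClient } from "@sourcebot/db";
import { userScopedPrismaClientExtension } from "@/prisma";

const listVisibleRepos = async (userId: string) => {
    const base = new PrismaClient();
    const scoped = base.$extends(userScopedPrismaClientExtension(userId));

    // With EXPERIMENT_PERMISSION_SYNC_ENABLED === 'true', the `where` clause is
    // augmented with `permittedUsers: { some: { userId } }` before it reaches the DB.
    return scoped.repo.findMany({
        where: { orgId: 1 }, // placeholder org id
    });
};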
@@ -1,6 +1,6 @@
import { prisma } from "@/prisma";
import { prisma as __unsafePrisma, userScopedPrismaClientExtension } from "@/prisma";
import { hashSecret } from "@sourcebot/crypto";
import { ApiKey, Org, OrgRole, User } from "@sourcebot/db";
import { ApiKey, Org, OrgRole, PrismaClient, User } from "@sourcebot/db";
import { headers } from "next/headers";
import { auth } from "./auth";
import { notAuthenticated, notFound, ServiceError } from "./lib/serviceError";

@@ -14,12 +14,14 @@ interface OptionalAuthContext {
    user?: User;
    org: Org;
    role: OrgRole;
    prisma: PrismaClient;
}

interface RequiredAuthContext {
    user: User;
    org: Org;
    role: Omit<OrgRole, 'GUEST'>;
    prisma: PrismaClient;
}

export const withAuthV2 = async <T>(fn: (params: RequiredAuthContext) => Promise<T>) => {

@@ -29,13 +31,13 @@ export const withAuthV2 = async <T>(fn: (params: RequiredAuthContext) => Promise
        return authContext;
    }

    const { user, org, role } = authContext;
    const { user, org, role, prisma } = authContext;

    if (!user || role === OrgRole.GUEST) {
        return notAuthenticated();
    }

    return fn({ user, org, role });
    return fn({ user, org, role, prisma });
};

export const withOptionalAuthV2 = async <T>(fn: (params: OptionalAuthContext) => Promise<T>) => {

@@ -44,7 +46,7 @@ export const withOptionalAuthV2 = async <T>(fn: (params: OptionalAuthContext) =>
        return authContext;
    }

    const { user, org, role } = authContext;
    const { user, org, role, prisma } = authContext;

    const hasAnonymousAccessEntitlement = hasEntitlement("anonymous-access");
    const orgMetadata = getOrgMetadata(org);

@@ -61,13 +63,13 @@ export const withOptionalAuthV2 = async <T>(fn: (params: OptionalAuthContext) =>
        return notAuthenticated();
    }

    return fn({ user, org, role });
    return fn({ user, org, role, prisma });
};

export const getAuthContext = async (): Promise<OptionalAuthContext | ServiceError> => {
    const user = await getAuthenticatedUser();

    const org = await prisma.org.findUnique({
    const org = await __unsafePrisma.org.findUnique({
        where: {
            id: SINGLE_TENANT_ORG_ID,
        }

@@ -77,7 +79,7 @@ export const getAuthContext = async (): Promise<OptionalAuthContext | ServiceErr
        return notFound("Organization not found");
    }

    const membership = user ? await prisma.userToOrg.findUnique({
    const membership = user ? await __unsafePrisma.userToOrg.findUnique({
        where: {
            orgId_userId: {
                orgId: org.id,

@@ -86,10 +88,13 @@ export const getAuthContext = async (): Promise<OptionalAuthContext | ServiceErr
            },
        }) : null;

    const prisma = __unsafePrisma.$extends(userScopedPrismaClientExtension(user?.id)) as PrismaClient;

    return {
        user: user ?? undefined,
        org,
        role: membership?.role ?? OrgRole.GUEST,
        prisma,
    };
};
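Note: a hedged sketch of how a server action might consume the scoped client exposed through the auth context above; the import path and the query itself are illustrative, not taken from this change.

// Sketch only; the import path is assumed.
import { withOptionalAuthV2 } from "@/withAuthV2";

export const listReposForCurrentUser = async () =>
    withOptionalAuthV2(async ({ org, prisma }) => {
        // `prisma` is already wrapped with userScopedPrismaClientExtension, so
        // repo queries are filtered to the caller's permitted repositories when
        // EXPERIMENT_PERMISSION_SYNC_ENABLED is 'true'.
        return prisma.repo.findMany({
            where: { orgId: org.id },
        });
    });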
@@ -98,7 +103,7 @@ export const getAuthenticatedUser = async () => {
    const session = await auth();
    if (session) {
        const userId = session.user.id;
        const user = await prisma.user.findUnique({
        const user = await __unsafePrisma.user.findUnique({
            where: {
                id: userId,
            }

@@ -116,7 +121,7 @@ export const getAuthenticatedUser = async () => {
    }

    // Attempt to find the user associated with this api key.
    const user = await prisma.user.findUnique({
    const user = await __unsafePrisma.user.findUnique({
        where: {
            id: apiKey.createdById,
        },

@@ -127,7 +132,7 @@ export const getAuthenticatedUser = async () => {
    }

    // Update the last used at timestamp for this api key.
    await prisma.apiKey.update({
    await __unsafePrisma.apiKey.update({
        where: {
            hash: apiKey.hash,
        },

@@ -152,7 +157,7 @@ const getVerifiedApiObject = async (apiKeyString: string): Promise<ApiKey | unde
    }

    const hash = hashSecret(parts[1]);
    const apiKey = await prisma.apiKey.findUnique({
    const apiKey = await __unsafePrisma.apiKey.findUnique({
        where: {
            hash,
        },