Mirror of https://github.com/sourcebot-dev/sourcebot.git (synced 2025-12-11 20:05:25 +00:00)

Commit: Multi branch / tag support (#58)

parent 5992ac5f88
commit ada53fc2c6

30 changed files with 546 additions and 49 deletions
.github/images/revisions_filter_dark.png (BIN, vendored, new file)
Binary file not shown. (After: 655 KiB)

.github/images/revisions_filter_light.png (BIN, vendored, new file)
Binary file not shown. (After: 878 KiB)
@@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ## [Unreleased]
 
+### Added
+
+- Added support for indexing and searching repositories across multiple revisions (tag or branch). ([#58](https://github.com/sourcebot-dev/sourcebot/pull/58))
+
 ## [2.3.0] - 2024-11-01
 
 ### Added
README.md (40 changes)

@@ -267,6 +267,46 @@ docker run -e <b>GITEA_TOKEN=my-secret-token</b> /* additional args */ ghcr.io/s
 
 If you're using a self-hosted GitLab or GitHub instance with a custom domain, you can specify the domain in your config file. See [configs/self-hosted.json](configs/self-hosted.json) for examples.
 
+## Searching multiple branches
+
+By default, Sourcebot will index the default branch. To configure Sourcebot to index multiple branches (or tags), the `revisions` field can be used:
+
+```jsonc
+{
+    "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v2/index.json",
+    "repos": [
+        {
+            "type": "github",
+            "revisions": {
+                // Index the `main` branch and any branches matching the `releases/*` glob pattern.
+                "branches": [
+                    "main",
+                    "releases/*"
+                ],
+                // Index the `latest` tag and any tags matching the `v*.*.*` glob pattern.
+                "tags": [
+                    "latest",
+                    "v*.*.*"
+                ]
+            },
+            "repos": [
+                "my_org/repo_a",
+                "my_org/repo_b"
+            ]
+        }
+    ]
+}
+```
+
+For each repository (in this case, `repo_a` and `repo_b`), Sourcebot will index all branches and tags matching the `branches` and `tags` patterns provided. Any branches or tags that don't match the patterns will be ignored and not indexed.
+
+To search on a specific revision, use the `revision` filter in the search bar:
+
+<picture>
+    <source media="(prefers-color-scheme: dark)" srcset=".github/images/revisions_filter_dark.png">
+    <img style="max-width:700px;width:100%" src=".github/images/revisions_filter_light.png">
+</picture>
+
 ## Searching a local directory
 
 Local directories can be searched by using the `local` type in your config file:
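To make the matching behaviour described in the README change concrete, here is a minimal illustrative sketch (branch names are hypothetical, not from the patch): the backend resolves the `branches` and `tags` globs against the revision names reported by the code host using the `micromatch` library, and only the matching names reach the indexer. The sketch uses micromatch's top-level matcher, which accepts a list of glob patterns.

```typescript
import micromatch from "micromatch";

// Hypothetical branches reported by the code host for one repository.
const remoteBranches = ["main", "dev", "releases/v1.0", "releases/v2.0"];

// Globs taken from the `revisions.branches` field in the config above.
const branchGlobs = ["main", "releases/*"];

// Keep only branches matching at least one glob; everything else is ignored.
const indexedBranches = micromatch(remoteBranches, branchGlobs);

console.log(indexedBranches); // ["main", "releases/v1.0", "releases/v2.0"] (order may vary)
```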
configs/multi-branch.json (new file, 26 changes)

@@ -0,0 +1,26 @@
+{
+    "$schema": "../schemas/v2/index.json",
+    "repos": [
+        {
+            "type": "github",
+            "revisions": {
+                // Specify branches to index...
+                "branches": [
+                    "main",
+                    "release/*"
+                ],
+                // ... or specify tags
+                "tags": [
+                    "v*.*.*"
+                ]
+            },
+            // For each repo (repoa, repob), Sourcebot will index all branches and tags in the repo
+            // matching the `branches` and `tags` patterns above. Any branches or tags that don't
+            // match the patterns will be ignored and not indexed.
+            "repos": [
+                "org/repoa",
+                "org/repob"
+            ]
+        }
+    ]
+}
@@ -12,6 +12,7 @@
   },
   "devDependencies": {
     "@types/argparse": "^2.0.16",
+    "@types/micromatch": "^4.0.9",
     "@types/node": "^22.7.5",
     "json-schema-to-typescript": "^15.0.2",
     "tsc-watch": "^6.2.0",
 
@@ -25,6 +26,7 @@
     "cross-fetch": "^4.0.0",
     "gitea-js": "^1.22.0",
     "lowdb": "^7.0.1",
+    "micromatch": "^4.0.8",
     "simple-git": "^3.27.0",
     "strip-json-comments": "^5.0.1",
     "winston": "^3.15.0"
@@ -5,6 +5,7 @@ import { AppContext, GitRepository } from './types.js';
 import fetch from 'cross-fetch';
 import { createLogger } from './logger.js';
 import path from 'path';
+import micromatch from 'micromatch';
 
 const logger = createLogger('Gitea');
 
@@ -60,7 +61,9 @@ export const getGiteaReposFromConfig = async (config: GiteaConfig, ctx: AppConte
             'zoekt.archived': marshalBool(repo.archived),
             'zoekt.fork': marshalBool(repo.fork!),
             'zoekt.public': marshalBool(repo.internal === false && repo.private === false),
-        }
+        },
+        branches: [],
+        tags: []
     } satisfies GitRepository;
 });
 
@@ -77,10 +80,68 @@ export const getGiteaReposFromConfig = async (config: GiteaConfig, ctx: AppConte
            repos = excludeReposByName(repos, config.exclude.repos, logger);
        }
    }
 
+    logger.debug(`Found ${repos.length} total repositories.`);
+
+    if (config.revisions) {
+        if (config.revisions.branches) {
+            const branchGlobs = config.revisions.branches;
+            repos = await Promise.all(
+                repos.map(async (repo) => {
+                    const [owner, name] = repo.name.split('/');
+                    let branches = (await getBranchesForRepo(owner, name, api)).map(branch => branch.name!);
+                    branches = micromatch.match(branches, branchGlobs);
+
+                    return {
+                        ...repo,
+                        branches,
+                    };
+                })
+            )
+        }
+
+        if (config.revisions.tags) {
+            const tagGlobs = config.revisions.tags;
+            repos = await Promise.all(
+                repos.map(async (repo) => {
+                    const [owner, name] = repo.name.split('/');
+                    let tags = (await getTagsForRepo(owner, name, api)).map(tag => tag.name!);
+                    tags = micromatch.match(tags, tagGlobs);
+
+                    return {
+                        ...repo,
+                        tags,
+                    };
+                })
+            )
+        }
+    }
+
    return repos;
 }
 
+const getTagsForRepo = async <T>(owner: string, repo: string, api: Api<T>) => {
+    logger.debug(`Fetching tags for repo ${owner}/${repo}...`);
+    const { durationMs, data: tags } = await measure(() =>
+        paginate((page) => api.repos.repoListTags(owner, repo, {
+            page
+        }))
+    );
+    logger.debug(`Found ${tags.length} tags in repo ${owner}/${repo} in ${durationMs}ms.`);
+    return tags;
+}
+
+const getBranchesForRepo = async <T>(owner: string, repo: string, api: Api<T>) => {
+    logger.debug(`Fetching branches for repo ${owner}/${repo}...`);
+    const { durationMs, data: branches } = await measure(() =>
+        paginate((page) => api.repos.repoListBranches(owner, repo, {
+            page
+        }))
+    );
+    logger.debug(`Found ${branches.length} branches in repo ${owner}/${repo} in ${durationMs}ms.`);
+    return branches;
+}
+
 const getReposOwnedByUsers = async <T>(users: string[], api: Api<T>) => {
     const repos = (await Promise.all(users.map(async (user) => {
         logger.debug(`Fetching repos for user ${user}...`);
@@ -3,12 +3,14 @@ import { GitHubConfig } from "./schemas/v2.js";
 import { createLogger } from "./logger.js";
 import { AppContext, GitRepository } from "./types.js";
 import path from 'path';
-import { excludeArchivedRepos, excludeForkedRepos, excludeReposByName, getTokenFromConfig, marshalBool } from "./utils.js";
+import { excludeArchivedRepos, excludeForkedRepos, excludeReposByName, getTokenFromConfig, marshalBool, measure } from "./utils.js";
+import micromatch from "micromatch";
 
 const logger = createLogger("GitHub");
 
 type OctokitRepository = {
     name: string,
+    id: number,
     full_name: string,
     fork: boolean,
     private: boolean,
 
@@ -88,7 +90,9 @@ export const getGitHubReposFromConfig = async (config: GitHubConfig, signal: Abo
             'zoekt.archived': marshalBool(repo.archived),
             'zoekt.fork': marshalBool(repo.fork),
             'zoekt.public': marshalBool(repo.private === false)
-        }
+        },
+        branches: [],
+        tags: [],
     } satisfies GitRepository;
 });
 
@@ -107,10 +111,75 @@ export const getGitHubReposFromConfig = async (config: GitHubConfig, signal: Abo
    }
 
    logger.debug(`Found ${repos.length} total repositories.`);
 
+    if (config.revisions) {
+        if (config.revisions.branches) {
+            const branchGlobs = config.revisions.branches;
+            repos = await Promise.all(
+                repos.map(async (repo) => {
+                    const [owner, name] = repo.name.split('/');
+                    let branches = (await getBranchesForRepo(owner, name, octokit, signal)).map(branch => branch.name);
+                    branches = micromatch.match(branches, branchGlobs);
+
+                    return {
+                        ...repo,
+                        branches,
+                    };
+                })
+            )
+        }
+
+        if (config.revisions.tags) {
+            const tagGlobs = config.revisions.tags;
+            repos = await Promise.all(
+                repos.map(async (repo) => {
+                    const [owner, name] = repo.name.split('/');
+                    let tags = (await getTagsForRepo(owner, name, octokit, signal)).map(tag => tag.name);
+                    tags = micromatch.match(tags, tagGlobs);
+
+                    return {
+                        ...repo,
+                        tags,
+                    };
+                })
+            )
+        }
+    }
+
    return repos;
 }
 
+const getTagsForRepo = async (owner: string, repo: string, octokit: Octokit, signal: AbortSignal) => {
+    logger.debug(`Fetching tags for repo ${owner}/${repo}...`);
+
+    const { durationMs, data: tags } = await measure(() => octokit.paginate(octokit.repos.listTags, {
+        owner,
+        repo,
+        per_page: 100,
+        request: {
+            signal
+        }
+    }));
+
+    logger.debug(`Found ${tags.length} tags for repo ${owner}/${repo} in ${durationMs}ms`);
+    return tags;
+}
+
+const getBranchesForRepo = async (owner: string, repo: string, octokit: Octokit, signal: AbortSignal) => {
+    logger.debug(`Fetching branches for repo ${owner}/${repo}...`);
+    const { durationMs, data: branches } = await measure(() => octokit.paginate(octokit.repos.listBranches, {
+        owner,
+        repo,
+        per_page: 100,
+        request: {
+            signal
+        }
+    }));
+    logger.debug(`Found ${branches.length} branches for repo ${owner}/${repo} in ${durationMs}ms`);
+    return branches;
+}
+
 const getReposOwnedByUsers = async (users: string[], isAuthenticated: boolean, octokit: Octokit, signal: AbortSignal) => {
     // @todo : error handling
     const repos = (await Promise.all(users.map(async (user) => {
 
@@ -149,7 +218,6 @@ const getReposOwnedByUsers = async (users: string[], isAuthenticated: boolean, o
 }
 
 const getReposForOrgs = async (orgs: string[], octokit: Octokit, signal: AbortSignal) => {
-    // @todo : error handling
     const repos = (await Promise.all(orgs.map(async (org) => {
         logger.debug(`Fetching repository info for org ${org}...`);
         const start = Date.now();
 
@@ -172,7 +240,6 @@ const getReposForOrgs = async (orgs: string[], octokit: Octokit, signal: AbortSi
 }
 
 const getRepos = async (repoList: string[], octokit: Octokit, signal: AbortSignal) => {
-    // @todo : error handling
     const repos = await Promise.all(repoList.map(async (repo) => {
         logger.debug(`Fetching repository info for ${repo}...`);
         const start = Date.now();
@@ -4,6 +4,7 @@ import { excludeArchivedRepos, excludeForkedRepos, excludeReposByName, getTokenF
 import { createLogger } from "./logger.js";
 import { AppContext, GitRepository } from "./types.js";
 import path from 'path';
+import micromatch from "micromatch";
 
 const logger = createLogger("GitLab");
 
@@ -90,7 +91,9 @@ export const getGitLabReposFromConfig = async (config: GitLabConfig, ctx: AppCon
             'zoekt.archived': marshalBool(project.archived),
             'zoekt.fork': marshalBool(isFork),
             'zoekt.public': marshalBool(project.visibility === 'public'),
-        }
+        },
+        branches: [],
+        tags: [],
     } satisfies GitRepository;
 });
 
@@ -110,5 +113,41 @@ export const getGitLabReposFromConfig = async (config: GitLabConfig, ctx: AppCon
 
    logger.debug(`Found ${repos.length} total repositories.`);
 
+    if (config.revisions) {
+        if (config.revisions.branches) {
+            const branchGlobs = config.revisions.branches;
+            repos = await Promise.all(repos.map(async (repo) => {
+                logger.debug(`Fetching branches for repo ${repo.name}...`);
+                let { durationMs, data } = await measure(() => api.Branches.all(repo.name));
+                logger.debug(`Found ${data.length} branches in repo ${repo.name} in ${durationMs}ms.`);
+
+                let branches = data.map((branch) => branch.name);
+                branches = micromatch.match(branches, branchGlobs);
+
+                return {
+                    ...repo,
+                    branches,
+                };
+            }));
+        }
+
+        if (config.revisions.tags) {
+            const tagGlobs = config.revisions.tags;
+            repos = await Promise.all(repos.map(async (repo) => {
+                logger.debug(`Fetching tags for repo ${repo.name}...`);
+                let { durationMs, data } = await measure(() => api.Tags.all(repo.name));
+                logger.debug(`Found ${data.length} tags in repo ${repo.name} in ${durationMs}ms.`);
+
+                let tags = data.map((tag) => tag.name);
+                tags = micromatch.match(tags, tagGlobs);
+
+                return {
+                    ...repo,
+                    tags,
+                };
+            }));
+        }
+    }
+
    return repos;
 }
@@ -10,7 +10,7 @@ import { AppContext, LocalRepository, GitRepository, Repository } from "./types.
 import { cloneRepository, fetchRepository } from "./git.js";
 import { createLogger } from "./logger.js";
 import { createRepository, Database, loadDB, updateRepository } from './db.js';
-import { isRemotePath, measure } from "./utils.js";
+import { arraysEqualShallow, isRemotePath, measure } from "./utils.js";
 import { REINDEX_INTERVAL_MS, RESYNC_CONFIG_INTERVAL_MS } from "./constants.js";
 import stripJsonComments from 'strip-json-comments';
 import { indexGitRepository, indexLocalRepository } from "./zoekt.js";
 
@@ -30,16 +30,21 @@ type Arguments = {
 const syncGitRepository = async (repo: GitRepository, ctx: AppContext) => {
     if (existsSync(repo.path)) {
         logger.info(`Fetching ${repo.id}...`);
 
         const { durationMs } = await measure(() => fetchRepository(repo, ({ method, stage , progress}) => {
             logger.info(`git.${method} ${stage} stage ${progress}% complete for ${repo.id}`)
         }));
 
         process.stdout.write('\n');
         logger.info(`Fetched ${repo.id} in ${durationMs / 1000}s`);
 
     } else {
         logger.info(`Cloning ${repo.id}...`);
 
         const { durationMs } = await measure(() => cloneRepository(repo, ({ method, stage, progress }) => {
             logger.info(`git.${method} ${stage} stage ${progress}% complete for ${repo.id}`)
         }));
 
         process.stdout.write('\n');
         logger.info(`Cloned ${repo.id} in ${durationMs / 1000}s`);
     }
 
@@ -55,6 +60,39 @@ const syncLocalRepository = async (repo: LocalRepository, ctx: AppContext, signa
     logger.info(`Indexed ${repo.id} in ${durationMs / 1000}s`);
 }
 
+export const isRepoReindxingRequired = (previous: Repository, current: Repository) => {
+    /**
+     * Checks if the any of the `revisions` properties have changed.
+     */
+    const isRevisionsChanged = () => {
+        if (previous.vcs !== 'git' || current.vcs !== 'git') {
+            return false;
+        }
+
+        return (
+            !arraysEqualShallow(previous.branches, current.branches) ||
+            !arraysEqualShallow(previous.tags, current.tags)
+        );
+    }
+
+    /**
+     * Check if the `exclude.paths` property has changed.
+     */
+    const isExcludePathsChanged = () => {
+        if (previous.vcs !== 'local' || current.vcs !== 'local') {
+            return false;
+        }
+
+        return !arraysEqualShallow(previous.excludedPaths, current.excludedPaths);
+    }
+
+    return (
+        isRevisionsChanged() ||
+        isExcludePathsChanged()
+    )
+}
+
 const syncConfig = async (configPath: string, db: Database, signal: AbortSignal, ctx: AppContext) => {
     const configContent = await (async () => {
         if (isRemotePath(configPath)) {
 
@@ -121,7 +159,17 @@ const syncConfig = async (configPath: string, db: Database, signal: AbortSignal,
     // Merge the repositories into the database
     for (const newRepo of configRepos) {
         if (newRepo.id in db.data.repos) {
-            await updateRepository(newRepo.id, newRepo, db);
+            const existingRepo = db.data.repos[newRepo.id];
+            const isReindexingRequired = isRepoReindxingRequired(existingRepo, newRepo);
+            if (isReindexingRequired) {
+                logger.info(`Marking ${newRepo.id} for reindexing due to configuration change.`);
+            }
+            await updateRepository(existingRepo.id, {
+                ...newRepo,
+                ...(isReindexingRequired ? {
+                    lastIndexedDate: undefined,
+                }: {})
+            }, db);
         } else {
             await createRepository(newRepo, db);
         }
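As a rough sketch of how the merge logic above behaves (object shapes are abbreviated and the module paths are illustrative, not taken from the patch): a change in the resolved `branches` or `tags` makes `isRepoReindxingRequired` return true, which causes `syncConfig` to clear `lastIndexedDate` so the repository is picked up for reindexing on the next sync pass.

```typescript
import { isRepoReindxingRequired } from "./index.js"; // illustrative import path
import { GitRepository } from "./types.js";

// Two snapshots of the same repo record. Only `vcs`, `branches`, and `tags` matter
// for this check; the rest of the GitRepository shape is elided with a cast.
const previous = { vcs: 'git', branches: ['main'], tags: [] } as unknown as GitRepository;
const current = { ...previous, branches: ['main', 'releases/v2.0'] };

// true: the branch list changed, so `lastIndexedDate` is reset and the repo is reindexed.
console.log(isRepoReindxingRequired(previous, current));
```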
@@ -58,6 +58,20 @@ export interface GitHubConfig {
      */
     repos?: string[];
   };
+  revisions?: GitRevisions;
+}
+/**
+ * The revisions (branches, tags) that should be included when indexing. The default branch (HEAD) is always indexed.
+ */
+export interface GitRevisions {
+  /**
+   * List of branches to include when indexing. For a given repo, only the branches that exist on the repo's remote *and* match at least one of the provided `branches` will be indexed. The default branch (HEAD) is always indexed. Glob patterns are supported.
+   */
+  branches?: string[];
+  /**
+   * List of tags to include when indexing. For a given repo, only the tags that exist on the repo's remote *and* match at least one of the provided `tags` will be indexed. Glob patterns are supported.
+   */
+  tags?: string[];
 }
 export interface GitLabConfig {
   /**
 
@@ -105,6 +119,7 @@ export interface GitLabConfig {
    */
    projects?: string[];
  };
+  revisions?: GitRevisions;
 }
 export interface GiteaConfig {
   /**
 
@@ -152,6 +167,7 @@ export interface GiteaConfig {
    */
    repos?: string[];
  };
+  revisions?: GitRevisions;
 }
 export interface LocalConfig {
   /**
@@ -13,6 +13,8 @@ interface BaseRepository {
 export interface GitRepository extends BaseRepository {
     vcs: 'git';
     cloneUrl: string;
+    branches: string[];
+    tags: string[];
     gitConfigMetadata?: Record<string, string>;
 }
@@ -73,4 +73,12 @@ export const resolvePathRelativeToConfig = (localPath: string, configPath: strin
     }
 
     return absolutePath;
 }
+
+export const arraysEqualShallow = <T>(a?: T[], b?: T[]) => {
+    if (a === b) return true;
+    if (a === undefined || b === undefined) return false;
+    if (a.length !== b.length) return false;
+
+    return a.every(item => b.includes(item)) && b.every(item => a.includes(item));
+}
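A quick illustration of the comparison semantics of the helper above (values are hypothetical): the check is order-insensitive and length-checked, so reordering the branch list in the config does not by itself count as a change.

```typescript
import { arraysEqualShallow } from "./utils.js";

// Same elements, different order: still considered equal.
console.log(arraysEqualShallow(["main", "releases/*"], ["releases/*", "main"])); // true

// Different length or different elements: not equal.
console.log(arraysEqualShallow(["main"], ["main", "dev"])); // false
console.log(arraysEqualShallow(["main"], undefined));       // false

// Both undefined: equal (a === b).
console.log(arraysEqualShallow(undefined, undefined));      // true
```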
@@ -4,8 +4,16 @@ import { AppContext, GitRepository, LocalRepository } from "./types.js";
 const ALWAYS_EXCLUDED_DIRS = ['.git', '.hg', '.svn'];
 
 export const indexGitRepository = async (repo: GitRepository, ctx: AppContext) => {
+    const revisions = [
+        'HEAD',
+        ...repo.branches ?? [],
+        ...repo.tags ?? [],
+    ];
+
+    const command = `zoekt-git-index -index ${ctx.indexPath} -branches ${revisions.join(',')} ${repo.path}`;
+
     return new Promise<{ stdout: string, stderr: string }>((resolve, reject) => {
-        exec(`zoekt-git-index -index ${ctx.indexPath} ${repo.path}`, (error, stdout, stderr) => {
+        exec(command, (error, stdout, stderr) => {
             if (error) {
                 reject(error);
                 return;
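To make the constructed command concrete, here is a small sketch with hypothetical paths and revision names; it mirrors the `revisions.join(',')` logic above and shows the shape of the resulting `zoekt-git-index` invocation, where the default branch plus every matched branch and tag is passed via `-branches`.

```typescript
// Hypothetical inputs; the real values come from AppContext and GitRepository.
const indexPath = '.sourcebot/index';
const repo = {
    path: '.sourcebot/github.com/my_org/repo_a',
    branches: ['main', 'releases/v2.0'],
    tags: ['latest'],
};

const revisions = ['HEAD', ...repo.branches, ...repo.tags];
const command = `zoekt-git-index -index ${indexPath} -branches ${revisions.join(',')} ${repo.path}`;

console.log(command);
// "zoekt-git-index -index .sourcebot/index -branches HEAD,main,releases/v2.0,latest .sourcebot/github.com/my_org/repo_a"
```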
@@ -61,6 +61,7 @@
     "server-only": "^0.0.1",
     "sharp": "^0.33.5",
     "tailwind-merge": "^2.5.2",
+    "tailwind-scrollbar-hide": "^1.1.7",
     "tailwindcss-animate": "^1.0.7",
     "usehooks-ts": "^3.1.0",
     "zod": "^3.23.8"
@@ -1,5 +1,5 @@
 import { fileSourceResponseSchema, listRepositoriesResponseSchema, searchResponseSchema } from "@/lib/schemas";
-import { FileSourceResponse, ListRepositoriesResponse, SearchRequest, SearchResponse } from "@/lib/types";
+import { FileSourceRequest, FileSourceResponse, ListRepositoriesResponse, SearchRequest, SearchResponse } from "@/lib/types";
 
 export const search = async (body: SearchRequest): Promise<SearchResponse> => {
     const result = await fetch(`/api/search`, {
 
@@ -13,16 +13,13 @@ export const search = async (body: SearchRequest): Promise<SearchResponse> => {
     return searchResponseSchema.parse(result);
 }
 
-export const fetchFileSource = async (fileName: string, repository: string): Promise<FileSourceResponse> => {
+export const fetchFileSource = async (body: FileSourceRequest): Promise<FileSourceResponse> => {
     const result = await fetch(`/api/source`, {
         method: "POST",
         headers: {
             "Content-Type": "application/json",
         },
-        body: JSON.stringify({
-            fileName,
-            repository,
-        }),
+        body: JSON.stringify(body),
     }).then(response => response.json());
 
     return fileSourceResponseSchema.parse(result);
@@ -76,7 +76,7 @@ export default async function Home() {
                     <Query query="lang:typescript"><Highlight>lang:</Highlight>typescript</Query> <QueryExplanation>(by language)</QueryExplanation>
                 </QueryExample>
                 <QueryExample>
-                    <Query query="branch:HEAD"><Highlight>branch:</Highlight>HEAD</Query> <QueryExplanation>(by branch)</QueryExplanation>
+                    <Query query="revision:HEAD"><Highlight>revision:</Highlight>HEAD</Query> <QueryExplanation>(by branch or tag)</QueryExplanation>
                 </QueryExample>
             </HowToSection>
             <HowToSection
|
@ -55,7 +55,7 @@ export const columns: ColumnDef<RepositoryColumnInfo>[] = [
|
||||||
}
|
}
|
||||||
|
|
||||||
return (
|
return (
|
||||||
<div className="flex flex-col gap-2">
|
<div className="flex flex-col gap-2 max-h-32 overflow-scroll scrollbar-hide">
|
||||||
{branches.map(({ name, version }, index) => {
|
{branches.map(({ name, version }, index) => {
|
||||||
const shortVersion = version.substring(0, 8);
|
const shortVersion = version.substring(0, 8);
|
||||||
return (
|
return (
|
||||||
|
|
|
||||||
|
|
@@ -124,7 +124,7 @@ export const CodePreview = ({
             {/* File path */}
             <div className="flex-1 overflow-hidden">
                 <span
-                    className={clsx("block truncate-start", {
+                    className={clsx("block truncate-start text-sm font-mono", {
                         "cursor-pointer text-blue-500 hover:underline": file?.link
                     })}
                     onClick={() => {
|
@ -21,16 +21,24 @@ export const CodePreviewPanel = ({
|
||||||
}: CodePreviewPanelProps) => {
|
}: CodePreviewPanelProps) => {
|
||||||
|
|
||||||
const { data: file } = useQuery({
|
const { data: file } = useQuery({
|
||||||
queryKey: ["source", fileMatch?.FileName, fileMatch?.Repository],
|
queryKey: ["source", fileMatch?.FileName, fileMatch?.Repository, fileMatch?.Branches],
|
||||||
queryFn: async (): Promise<CodePreviewFile | undefined> => {
|
queryFn: async (): Promise<CodePreviewFile | undefined> => {
|
||||||
if (!fileMatch) {
|
if (!fileMatch) {
|
||||||
return undefined;
|
return undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
return fetchFileSource(fileMatch.FileName, fileMatch.Repository)
|
// If there are multiple branches pointing to the same revision of this file, it doesn't
|
||||||
|
// matter which branch we use here, so use the first one.
|
||||||
|
const branch = fileMatch.Branches && fileMatch.Branches.length > 0 ? fileMatch.Branches[0] : undefined;
|
||||||
|
|
||||||
|
return fetchFileSource({
|
||||||
|
fileName: fileMatch.FileName,
|
||||||
|
repository: fileMatch.Repository,
|
||||||
|
branch,
|
||||||
|
})
|
||||||
.then(({ source }) => {
|
.then(({ source }) => {
|
||||||
// @todo : refector this to use the templates provided by zoekt.
|
// @todo : refector this to use the templates provided by zoekt.
|
||||||
const link = getCodeHostFilePreviewLink(fileMatch.Repository, fileMatch.FileName)
|
const link = getCodeHostFilePreviewLink(fileMatch.Repository, fileMatch.FileName, branch);
|
||||||
|
|
||||||
const decodedSource = base64Decode(source);
|
const decodedSource = base64Decode(source);
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@@ -17,6 +17,7 @@ interface FileMatchContainerProps {
     onMatchIndexChanged: (matchIndex: number) => void;
     showAllMatches: boolean;
     onShowAllMatchesButtonClicked: () => void;
+    isBranchFilteringEnabled: boolean;
 }
 
 export const FileMatchContainer = ({
 
@@ -25,6 +26,7 @@ export const FileMatchContainer = ({
     onMatchIndexChanged,
     showAllMatches,
     onShowAllMatchesButtonClicked,
+    isBranchFilteringEnabled,
 }: FileMatchContainerProps) => {
 
     const matchCount = useMemo(() => {
 
@@ -90,6 +92,14 @@ export const FileMatchContainer = ({
         onMatchIndexChanged(matchIndex);
     }, [matches, onMatchIndexChanged, onOpenFile]);
 
+    const branches = useMemo(() => {
+        if (!file.Branches) {
+            return [];
+        }
+
+        return file.Branches;
+    }, [file.Branches]);
+
     return (
         <div>
 
@@ -114,9 +124,18 @@ export const FileMatchContainer = ({
                 >
                     {repoName}
                 </span>
+                {isBranchFilteringEnabled && branches.length > 0 && (
+                    <span
+                        className="text-xs font-semibold text-gray-500 dark:text-gray-400 mt-0.5"
+                        title={branches.join(", ")}
+                    >
+                        {`@ ${branches[0]}`}
+                        {branches.length > 1 && ` (+ ${branches.length - 1})`}
+                    </span>
+                )}
                 <span>·</span>
                 <div className="flex-1 flex items-center overflow-hidden">
-                    <span className="inline-block w-full truncate-start">
+                    <span className="inline-block w-full truncate-start font-mono text-sm">
                         {!fileNameRange ?
                             file.FileName
                             : (
@@ -11,6 +11,7 @@ interface SearchResultsPanelProps {
     onMatchIndexChanged: (matchIndex: number) => void;
     isLoadMoreButtonVisible: boolean;
     onLoadMoreButtonClicked: () => void;
+    isBranchFilteringEnabled: boolean;
 }
 
 const ESTIMATED_LINE_HEIGHT_PX = 20;
 
@@ -23,6 +24,7 @@ export const SearchResultsPanel = ({
     onMatchIndexChanged,
     isLoadMoreButtonVisible,
     onLoadMoreButtonClicked,
+    isBranchFilteringEnabled,
 }: SearchResultsPanelProps) => {
     const parentRef = useRef<HTMLDivElement>(null);
     const [showAllMatchesStates, setShowAllMatchesStates] = useState(Array(fileMatches.length).fill(false));
 
@@ -145,6 +147,7 @@ export const SearchResultsPanel = ({
                     onShowAllMatchesButtonClicked={() => {
                         onShowAllMatchesButtonClicked(virtualRow.index);
                     }}
+                    isBranchFilteringEnabled={isBranchFilteringEnabled}
                 />
             </div>
         ))}
@@ -77,35 +77,55 @@ export default function SearchPage() {
         });
     }, [captureEvent, searchResponse]);
 
-    const { fileMatches, searchDurationMs } = useMemo((): { fileMatches: SearchResultFile[], searchDurationMs: number } => {
+    const { fileMatches, searchDurationMs, totalMatchCount, isBranchFilteringEnabled } = useMemo(() => {
         if (!searchResponse) {
             return {
                 fileMatches: [],
                 searchDurationMs: 0,
+                totalMatchCount: 0,
+                isBranchFilteringEnabled: false,
             };
         }
 
-        return {
-            fileMatches: searchResponse.Result.Files ?? [],
-            searchDurationMs: Math.round(searchResponse.Result.Duration / 1000000),
-        }
-    }, [searchResponse]);
+        const isBranchFilteringEnabled = searchResponse.isBranchFilteringEnabled;
+        let fileMatches = searchResponse.Result.Files ?? [];
+
+        // We only want to show matches for the default branch when
+        // the user isn't explicitly filtering by branch.
+        if (!isBranchFilteringEnabled) {
+            fileMatches = fileMatches.filter(match => {
+                // @note : this case handles local repos that don't have any branches.
+                if (!match.Branches) {
+                    return true;
+                }
+
+                return match.Branches.includes("HEAD");
+            });
+        }
+
+        return {
+            fileMatches,
+            searchDurationMs: Math.round(searchResponse.Result.Duration / 1000000),
+            totalMatchCount: searchResponse.Result.MatchCount,
+            isBranchFilteringEnabled,
+        }
+    }, [searchResponse, searchQuery]);
 
     const isMoreResultsButtonVisible = useMemo(() => {
-        return searchResponse && searchResponse.Result.MatchCount > maxMatchDisplayCount;
-    }, [searchResponse, maxMatchDisplayCount]);
+        return totalMatchCount > maxMatchDisplayCount;
+    }, [totalMatchCount, maxMatchDisplayCount]);
 
     const numMatches = useMemo(() => {
         // Accumualtes the number of matches across all files
-        return searchResponse?.Result.Files?.reduce(
+        return fileMatches.reduce(
            (acc, file) =>
                acc + file.ChunkMatches.reduce(
                    (acc, chunk) => acc + chunk.Ranges.length,
                    0,
                ),
            0,
-        ) ?? 0;
-    }, [searchResponse]);
+        );
+    }, [fileMatches]);
 
     const onLoadMoreResults = useCallback(() => {
         const url = createPathWithQueryParams('/search',
 
@@ -151,8 +171,8 @@ export default function SearchPage() {
             {!isLoading && (
                 <div className="bg-accent py-1 px-2 flex flex-row items-center gap-4">
                     {
-                        fileMatches.length > 0 && searchResponse ? (
-                            <p className="text-sm font-medium">{`[${searchDurationMs} ms] Displaying ${numMatches} of ${searchResponse.Result.MatchCount} matches in ${fileMatches.length} ${fileMatches.length > 1 ? 'files' : 'file'}`}</p>
+                        fileMatches.length > 0 ? (
+                            <p className="text-sm font-medium">{`[${searchDurationMs} ms] Found ${numMatches} matches in ${fileMatches.length} ${fileMatches.length > 1 ? 'files' : 'file'}`}</p>
                         ) : (
                             <p className="text-sm font-medium">No results</p>
                         )
 
@@ -180,6 +200,7 @@ export default function SearchPage() {
                     fileMatches={fileMatches}
                    isMoreResultsButtonVisible={isMoreResultsButtonVisible}
                    onLoadMoreResults={onLoadMoreResults}
+                    isBranchFilteringEnabled={isBranchFilteringEnabled}
                 />
             )}
         </div>
 
@@ -190,12 +211,14 @@ interface PanelGroupProps {
     fileMatches: SearchResultFile[];
     isMoreResultsButtonVisible?: boolean;
     onLoadMoreResults: () => void;
+    isBranchFilteringEnabled: boolean;
 }
 
 const PanelGroup = ({
     fileMatches,
     isMoreResultsButtonVisible,
     onLoadMoreResults,
+    isBranchFilteringEnabled,
 }: PanelGroupProps) => {
     const [selectedMatchIndex, setSelectedMatchIndex] = useState(0);
     const [selectedFile, setSelectedFile] = useState<SearchResultFile | undefined>(undefined);
 
@@ -253,6 +276,7 @@ const PanelGroup = ({
                 }}
                 isLoadMoreButtonVisible={!!isMoreResultsButtonVisible}
                 onLoadMoreButtonClicked={onLoadMoreResults}
+                isBranchFilteringEnabled={isBranchFilteringEnabled}
             />
         ) : (
             <div className="flex flex-col items-center justify-center h-full">
@@ -47,7 +47,7 @@ export const searchResponseStats = {
 }
 
 // @see : https://github.com/sourcebot-dev/zoekt/blob/3780e68cdb537d5a7ed2c84d9b3784f80c7c5d04/api.go#L497
-export const searchResponseSchema = z.object({
+export const zoektSearchResponseSchema = z.object({
     Result: z.object({
         ...searchResponseStats,
         Files: z.array(z.object({
 
@@ -71,9 +71,16 @@ export const searchResponseSchema = z.object({
     }),
 });
 
+export const searchResponseSchema = z.object({
+    ...zoektSearchResponseSchema.shape,
+    // Flag when a branch filter was used (e.g., `branch:`, `revision:`, etc.).
+    isBranchFilteringEnabled: z.boolean(),
+});
+
 export const fileSourceRequestSchema = z.object({
     fileName: z.string(),
-    repository: z.string()
+    repository: z.string(),
+    branch: z.string().optional(),
 });
 
 export const fileSourceResponseSchema = z.object({
@@ -1,12 +1,45 @@
 import escapeStringRegexp from "escape-string-regexp";
 import { SHARD_MAX_MATCH_COUNT, TOTAL_MAX_MATCH_COUNT } from "../environment";
-import { listRepositoriesResponseSchema, searchResponseSchema } from "../schemas";
+import { listRepositoriesResponseSchema, searchResponseSchema, zoektSearchResponseSchema } from "../schemas";
 import { FileSourceRequest, FileSourceResponse, ListRepositoriesResponse, SearchRequest, SearchResponse } from "../types";
 import { fileNotFound, invalidZoektResponse, ServiceError, unexpectedError } from "../serviceError";
 import { isServiceError } from "../utils";
 import { zoektFetch } from "./zoektClient";
 
+// List of supported query prefixes in zoekt.
+// @see : https://github.com/sourcebot-dev/zoekt/blob/main/query/parse.go#L417
+enum zoektPrefixes {
+    archived = "archived:",
+    branchShort = "b:",
+    branch = "branch:",
+    caseShort = "c:",
+    case = "case:",
+    content = "content:",
+    fileShort = "f:",
+    file = "file:",
+    fork = "fork:",
+    public = "public:",
+    repoShort = "r:",
+    repo = "repo:",
+    regex = "regex:",
+    lang = "lang:",
+    sym = "sym:",
+    typeShort = "t:",
+    type = "type:",
+}
+
+// Mapping of additional "alias" prefixes to zoekt prefixes.
+const aliasPrefixMappings: Record<string, zoektPrefixes> = {
+    "rev:": zoektPrefixes.branch,
+    "revision:": zoektPrefixes.branch,
+}
+
 export const search = async ({ query, maxMatchDisplayCount, whole }: SearchRequest): Promise<SearchResponse | ServiceError> => {
+    // Replace any alias prefixes with their corresponding zoekt prefixes.
+    for (const [prefix, zoektPrefix] of Object.entries(aliasPrefixMappings)) {
+        query = query.replaceAll(prefix, zoektPrefix);
+    }
+
     const body = JSON.stringify({
         q: query,
         // @see: https://github.com/sourcebot-dev/zoekt/blob/main/api.go#L892
 
@@ -31,21 +64,34 @@ export const search = async ({ query, maxMatchDisplayCount, whole }: SearchReque
     }
 
     const searchBody = await searchResponse.json();
-    const parsedSearchResponse = searchResponseSchema.safeParse(searchBody);
+    const parsedSearchResponse = zoektSearchResponseSchema.safeParse(searchBody);
     if (!parsedSearchResponse.success) {
         console.error(`Failed to parse zoekt response. Error: ${parsedSearchResponse.error}`);
         return unexpectedError(`Something went wrong while parsing the response from zoekt`);
     }
 
-    return parsedSearchResponse.data;
+    const isBranchFilteringEnabled = (
+        query.includes(zoektPrefixes.branch) ||
+        query.includes(zoektPrefixes.branchShort)
+    )
+
+    return {
+        ...parsedSearchResponse.data,
+        isBranchFilteringEnabled,
+    }
 }
 
-export const getFileSource = async ({ fileName, repository }: FileSourceRequest): Promise<FileSourceResponse | ServiceError> => {
+export const getFileSource = async ({ fileName, repository, branch }: FileSourceRequest): Promise<FileSourceResponse | ServiceError> => {
     const escapedFileName = escapeStringRegexp(fileName);
     const escapedRepository = escapeStringRegexp(repository);
 
+    let query = `file:${escapedFileName} repo:^${escapedRepository}$`;
+    if (branch) {
+        query = query.concat(` branch:${branch}`);
+    }
+
     const searchResponse = await search({
-        query: `file:${escapedFileName} repo:^${escapedRepository}$`,
+        query,
         maxMatchDisplayCount: 1,
         whole: true,
     });
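A small illustrative sketch (the query is hypothetical) of the alias rewrite performed above before the query is forwarded to zoekt. The substitution is a plain string replacement over the whole query, and the `isBranchFilteringEnabled` flag is then derived from the rewritten query by looking for the `branch:` / `b:` prefixes.

```typescript
// Hypothetical query typed into the search bar.
let query = "revision:releases/* repo:my_org/repo_a fooBar";

// Same substitution as in the diff: alias prefixes are rewritten to zoekt's `branch:` prefix.
const aliasPrefixMappings: Record<string, string> = {
    "rev:": "branch:",
    "revision:": "branch:",
};
for (const [prefix, zoektPrefix] of Object.entries(aliasPrefixMappings)) {
    query = query.replaceAll(prefix, zoektPrefix);
}

console.log(query); // "branch:releases/* repo:my_org/repo_a fooBar"
```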
@@ -3,7 +3,9 @@ import { fileSourceRequestSchema, fileSourceResponseSchema, listRepositoriesResp
 
 export type KeymapType = "default" | "vim";
 
+export type SearchRequest = z.infer<typeof searchRequestSchema>;
 export type SearchResponse = z.infer<typeof searchResponseSchema>;
+
 export type SearchResult = SearchResponse["Result"];
 export type SearchResultFile = NonNullable<SearchResult["Files"]>[number];
 export type SearchResultFileMatch = SearchResultFile["ChunkMatches"][number];
 
@@ -15,7 +17,6 @@ export type FileSourceResponse = z.infer<typeof fileSourceResponseSchema>;
 
 export type ListRepositoriesResponse = z.infer<typeof listRepositoriesResponseSchema>;
 export type Repository = z.infer<typeof repositorySchema>;
-export type SearchRequest = z.infer<typeof searchRequestSchema>;
 
 export enum SearchQueryParams {
     query = "query",
@@ -73,19 +73,19 @@ export const getRepoCodeHostInfo = (repoName: string): CodeHostInfo | undefined
     return undefined;
 }
 
-export const getCodeHostFilePreviewLink = (repoName: string, filePath: string): string | undefined => {
+export const getCodeHostFilePreviewLink = (repoName: string, filePath: string, branch: string = "HEAD"): string | undefined => {
     const info = getRepoCodeHostInfo(repoName);
 
     if (info?.type === "github") {
-        return `${info.repoLink}/blob/HEAD/${filePath}`;
+        return `${info.repoLink}/blob/${branch}/${filePath}`;
     }
 
     if (info?.type === "gitlab") {
-        return `${info.repoLink}/-/blob/HEAD/${filePath}`;
+        return `${info.repoLink}/-/blob/${branch}/${filePath}`;
     }
 
     if (info?.type === "gitea") {
-        return `${info.repoLink}/src/branch/HEAD/${filePath}`;
+        return `${info.repoLink}/src/branch/${branch}/${filePath}`;
     }
 
     return undefined;
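For illustration, with made-up repository and file names, the optional `branch` argument now lands in the generated permalink in place of the previously hard-coded `HEAD`:

```typescript
import { getCodeHostFilePreviewLink } from "./utils";

// Assuming getRepoCodeHostInfo resolves this name to a GitHub-hosted repo link.
getCodeHostFilePreviewLink("github.com/my_org/repo_a", "src/index.ts", "releases/v2.0");
// => "<repoLink>/blob/releases/v2.0/src/index.ts"

// With no branch argument, the default parameter keeps the old behaviour.
getCodeHostFilePreviewLink("github.com/my_org/repo_a", "src/index.ts");
// => "<repoLink>/blob/HEAD/src/index.ts"
```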
@@ -74,7 +74,10 @@ const config = {
             },
         },
     },
-    plugins: [require("tailwindcss-animate")],
+    plugins: [
+        require("tailwindcss-animate"),
+        require('tailwind-scrollbar-hide')
+    ],
 } satisfies Config
 
 export default config
@@ -24,6 +24,47 @@
                 }
             ]
         },
+        "GitRevisions": {
+            "type": "object",
+            "description": "The revisions (branches, tags) that should be included when indexing. The default branch (HEAD) is always indexed.",
+            "properties": {
+                "branches": {
+                    "type": "array",
+                    "description": "List of branches to include when indexing. For a given repo, only the branches that exist on the repo's remote *and* match at least one of the provided `branches` will be indexed. The default branch (HEAD) is always indexed. Glob patterns are supported.",
+                    "items": {
+                        "type": "string"
+                    },
+                    "examples": [
+                        [
+                            "main",
+                            "release/*"
+                        ],
+                        [
+                            "**"
+                        ]
+                    ],
+                    "default": []
+                },
+                "tags": {
+                    "type": "array",
+                    "description": "List of tags to include when indexing. For a given repo, only the tags that exist on the repo's remote *and* match at least one of the provided `tags` will be indexed. Glob patterns are supported.",
+                    "items": {
+                        "type": "string"
+                    },
+                    "examples": [
+                        [
+                            "latest",
+                            "v2.*.*"
+                        ],
+                        [
+                            "**"
+                        ]
+                    ],
+                    "default": []
+                }
+            },
+            "additionalProperties": false
+        },
         "GitHubConfig": {
             "type": "object",
             "properties": {
 
@@ -113,6 +154,9 @@
                 }
             },
             "additionalProperties": false
+        },
+        "revisions": {
+            "$ref": "#/definitions/GitRevisions"
         }
     },
     "required": [
 
@@ -207,6 +251,9 @@
                 }
             },
             "additionalProperties": false
+        },
+        "revisions": {
+            "$ref": "#/definitions/GitRevisions"
         }
     },
     "required": [
 
@@ -297,6 +344,9 @@
                 }
             },
             "additionalProperties": false
+        },
+        "revisions": {
+            "$ref": "#/definitions/GitRevisions"
         }
     },
     "required": [
yarn.lock (19 changes)

@@ -1327,6 +1327,11 @@
   resolved "https://registry.yarnpkg.com/@types/argparse/-/argparse-2.0.16.tgz#3bb7ccd2844b3a8bcd6efbd217f6c0ea06a80d22"
   integrity sha512-aMqBra2JlqpFeCWOinCtpRpiCkPIXH8hahW2+FkGzvWjfE5sAqtOcrjN5DRcMnTQqFDe6gb1CVYuGnBH0lhXwA==
 
+"@types/braces@*":
+  version "3.0.4"
+  resolved "https://registry.yarnpkg.com/@types/braces/-/braces-3.0.4.tgz#403488dc1c8d0db288270d3bbf0ce5f9c45678b4"
+  integrity sha512-0WR3b8eaISjEW7RpZnclONaLFDf7buaowRHdqLp4vLj54AsSAYWfh3DRbfiYJY9XDxMgx1B4sE1Afw2PGpuHOA==
+
 "@types/json-schema@^7.0.15":
   version "7.0.15"
   resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.15.tgz#596a1747233694d50f6ad8a7869fcb6f56cf5841"
 
@@ -1342,6 +1347,13 @@
   resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.17.10.tgz#64f3edf656af2fe59e7278b73d3e62404144a6e6"
   integrity sha512-YpS0zzoduEhuOWjAotS6A5AVCva7X4lVlYLF0FYHAY9sdraBfnatttHItlWeZdGhuEkf+OzMNg2ZYAx8t+52uQ==
 
+"@types/micromatch@^4.0.9":
+  version "4.0.9"
+  resolved "https://registry.yarnpkg.com/@types/micromatch/-/micromatch-4.0.9.tgz#8e5763a8c1fc7fbf26144d9215a01ab0ff702dbb"
+  integrity sha512-7V+8ncr22h4UoYRLnLXSpTxjQrNUXtWHGeMPRJt1nULXI57G9bIcpyrHlmrQ7QK24EyyuXvYcSSWAM8GA9nqCg==
+  dependencies:
+    "@types/braces" "*"
+
 "@types/node@^20":
   version "20.16.10"
   resolved "https://registry.yarnpkg.com/@types/node/-/node-20.16.10.tgz#0cc3fdd3daf114a4776f54ba19726a01c907ef71"
 
@@ -3444,7 +3456,7 @@ merge2@^1.3.0, merge2@^1.4.1:
   resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae"
   integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==
 
-micromatch@^4.0.4, micromatch@^4.0.5:
+micromatch@^4.0.4, micromatch@^4.0.5, micromatch@^4.0.8:
   version "4.0.8"
   resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202"
   integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==
 
@@ -4544,6 +4556,11 @@ tailwind-merge@^2.5.2:
   resolved "https://registry.yarnpkg.com/tailwind-merge/-/tailwind-merge-2.5.3.tgz#579546e14ddda24462e0303acd8798c50f5511bb"
   integrity sha512-d9ZolCAIzom1nf/5p4LdD5zvjmgSxY0BGgdSvmXIoMYAiPdAW/dSpP7joCDYFY7r/HkEa2qmPtkgsu0xjQeQtw==
 
+tailwind-scrollbar-hide@^1.1.7:
+  version "1.1.7"
+  resolved "https://registry.yarnpkg.com/tailwind-scrollbar-hide/-/tailwind-scrollbar-hide-1.1.7.tgz#90b481fb2e204030e3919427416650c54f56f847"
+  integrity sha512-X324n9OtpTmOMqEgDUEA/RgLrNfBF/jwJdctaPZDzB3mppxJk7TLIDmOreEDm1Bq4R9LSPu4Epf8VSdovNU+iA==
+
 tailwindcss-animate@^1.0.7:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/tailwindcss-animate/-/tailwindcss-animate-1.0.7.tgz#318b692c4c42676cc9e67b19b78775742388bef4"