Compare commits

...

49 commits
v4.9.0 ... main

Author SHA1 Message Date
msukkari
095474a901 update perm syncing docs
Some checks are pending
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-12-11 06:46:20 -08:00
Brendan Kellam
d63f3cf9d9
chore(web): Improve error messages for file loading errors (#665)
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-12-05 11:58:19 -08:00
Cade 🐀
3d85a0595c
fix: add support for anyuid to Dockerfile (#658)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
* fix: add support for anyuid to Dockerfile

* changelog

---------

Co-authored-by: Cade Schlaefli <cade.schlaefli@mouser.com>
Co-authored-by: Brendan Kellam <bshizzle1234@gmail.com>
2025-12-04 22:29:23 -08:00
Brian Phillips
84cf524d84
Add GHES support to the review agent (#611)
* add support for GHES to the review agent

* fix throttling types

---------

Co-authored-by: Brendan Kellam <bshizzle1234@gmail.com>
2025-12-04 22:08:24 -08:00
bkellam
7c72578765 sourcebot v4.10.2
Some checks are pending
Update Roadmap Released / update (push) Waiting to run
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
2025-12-04 10:41:41 -08:00
Brendan Kellam
483b433aab
fix(web): Respect disable telemetry flag for web server side events (#657)
* fix

* changelog
2025-12-04 10:32:32 -08:00
Brendan Kellam
bcca1d6d7d
chore(web): Fix mistake of upgrading to a breaking version of next (#656)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-12-03 17:12:10 -08:00
bkellam
0e88eecc30 release @sourcebot/mcp v1.0.11
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-12-03 16:10:51 -08:00
bkellam
a4685e34ab sourcebot v4.10.1 2025-12-03 16:05:53 -08:00
Brendan Kellam
76dc2f5a12
chore(web): Server side search telemetry (#652) 2025-12-03 16:04:36 -08:00
Brendan Kellam
7fc068f8b2
fix(web): Fix CVE 2025-55182 (#654) 2025-12-03 15:59:43 -08:00
bkellam
91caf129ed chore: add default PostHog token in env.server.ts for development scenarios
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-12-01 20:18:23 -08:00
Brendan Kellam
92578881df
chore(web): Scope code nav to current repository by default (#647)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-30 18:53:09 -08:00
Brendan Kellam
28986f4355
chore(web): Bake PostHog token into build 2025-11-30 18:29:01 -08:00
Adam
41a6eb48a0
Shrink Docker image size by ~1/3 by removing unnecessary ops (#642)
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
* Remove duplicate copy, chown on copy

* Add Dockerfile syntax

* Revert entrypoint changes to avoid errors in some non-root cases
2025-11-29 12:43:12 -08:00
Brendan Kellam
92ae76168c
fix(web): Fix issue where creating a new Ask thread would result in a 404 (#641)
Some checks are pending
Publish to ghcr / merge (push) Blocked by required conditions
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Update Roadmap Released / update (push) Waiting to run
2025-11-28 23:01:33 -08:00
Brendan Kellam
f1dd16be82
fix(web): Ask sourcebot perf improvements (#632)
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-11-27 16:56:11 -08:00
Brendan Kellam
cc2837b740
fix(web): Fix error when loading files with special characters (#637) 2025-11-27 14:24:45 -08:00
Brendan Kellam
0633d1f23c
fix discord link (#634)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-26 13:56:00 -08:00
Brendan Kellam
8bc4f1e520
feat(worker): Add ALWAYS_INDEX_FILE_PATTERNS env var to specify files that should always be indexed (#631)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-25 23:38:30 -08:00
Brendan Kellam
c962fdd636
fix(web): Fix issue where quotes cannot be used within a query (#629)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-25 12:06:51 -08:00
bkellam
8e036a340f @sourcebot/mcp v1.0.10
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-24 14:29:42 -08:00
bkellam
fb305c2808 sourcebot v4.10.0 2025-11-24 13:44:12 -08:00
Brendan Kellam
c671e96139
feat(web): Add support for authentik sso (#627) 2025-11-24 13:28:04 -08:00
Brendan Kellam
f3a8fa3dab
feat(web): Streamed code search (#623)
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
* generate protobuf types

* stream poc over SSE

* wip: make stream search api follow existing schema. Modify UI to support streaming

* fix scrolling issue

* Dockerfile

* wip on lezer parser grammar for query language

* add lezer tree -> grpc transformer

* remove spammy log message

* fix syntax highlighting by adding a module resolution for @lezer/common

* further wip on query language

* Add case sensitivity and regexp toggles

* Improved type safety / cleanup for query lang

* support search contexts

* update Dockerfile with query language package

* fix filter

* Add skeletons to filter panel when search is streaming

* add client side caching

* improved cancellation handling

* add isSearchExausted flag for flagging when a search captured all results

* Add back posthog search_finished event

* remove zoekt tenant enforcement

* migrate blocking search over to grpc. Centralize everything in searchApi

* branch handling

* plumb file weburl

* add repo_sets filter for repositories a user has access to

* refactor a bunch of stuff + add support for passing in Query IR to search api

* refactor

* dev README

* wip on better error handling

* error handling for stream path

* update mcp

* changelog wip

* type fix

* style

* Support rev:* wildcard

* changelog

* changelog nit

* feedback

* fix build

* update docs and remove unneeded test file
2025-11-22 15:33:31 -08:00
Brendan Kellam
09507d3e89
fix(worker): Permission syncer fixes (#624)
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-11-19 22:14:23 -08:00
Brendan Kellam
97dd54d48f
chore(web): Add count to members / requests / invites tabs in settings (#621)
Some checks failed
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-11-18 10:41:40 -08:00
bkellam
831197980c release @sourcebot/mcp v1.0.9
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-17 17:11:03 -08:00
Teddy Reinert
9bee8c2c59
feat(mcp): Add pagination and filtering to list_repos tool (#614)
* feat(mcp): Add pagination and filtering to list_repos tool

Fixes #566

  - Add query parameter to filter repositories by name
  - Add pageNumber and limit parameters for pagination
  - Include pagination info in response when applicable
  - Add listReposRequestSchema for request validation
  - Update README with new list_repos parameters

* feat(mcp): Sort repositories alphabetically for consistent pagination

Fixes #566
- Updated CHANGELOG.md with pagination and filtering changes

---------

Co-authored-by: Brendan Kellam <bshizzle1234@gmail.com>
2025-11-17 17:08:20 -08:00
Jose Hernandez
e20d514569
feat(bitbucket): support glob patterns in repository exclusions (#620)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
* feat(bitbucket): support glob patterns in repository exclusions

Update Bitbucket Cloud and Server exclusion logic to support glob
patterns (e.g., "org/repo*") in the exclude.repos configuration,
matching the documented behavior and aligning with other providers
(GitHub, GitLab, Gitea, Azure DevOps).

Changes:
- Add micromatch import for pattern matching
- Replace Array.includes() with micromatch.isMatch() in
  cloudShouldExcludeRepo and serverShouldExcludeRepo functions
- Add reason logging for exclusion decisions to match GitHub's pattern

This enables users to exclude repositories using wildcard patterns
as documented in the Bitbucket Cloud connection documentation.

* update changelog

---------

Co-authored-by: Jose Hernandez <jose.hernandez@emilabs.ai>
Co-authored-by: bkellam <bshizzle1234@gmail.com>
2025-11-17 14:33:39 -08:00
Michael Sukkarieh
1dff20d47a
fix(ee): Wipe search contexts on init if we no longer have the entitlement (#618)
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-11-13 21:29:51 -08:00
Brendan Kellam
fbe1073d0e
fix(web): Fix loading issues with references / definitions list (#617) 2025-11-13 17:21:48 -08:00
bkellam
341836a2ed sourcebot v4.9.2
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-13 00:36:47 -08:00
Brendan Kellam
2e959b7d59
feat(web): Add env var to configure default max match count (#616) 2025-11-13 00:06:23 -08:00
Brendan Kellam
a814bd6f7e
fix(web): Search performance improvements (#615) 2025-11-12 23:20:26 -08:00
Brendan Kellam
06c84f0bf5
fix(worker): Fix issue where connections would always sync on startup (#613)
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-11-11 20:31:08 -08:00
Brendan Kellam
903d15a2c5
fix(worker): Fix issues with gracefully shutting down (#612) 2025-11-11 20:11:59 -08:00
Brendan Kellam
18fad64baa
feat(web): Add force resync buttons for repo & connections (#610) 2025-11-11 15:16:40 -08:00
bkellam
2dfafdae41 release @sourcebot/mcp v1.0.8
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-10 15:32:32 -08:00
Wayne Sun
278c0dc556
fix: return truncated content when token limit exceeded in MCP search_code (#604)
When search results exceed maxTokens limit, now returns partial truncated
content instead of discarding the file completely.

Changes:
- Calculate remaining token budget before breaking
- Truncate file content to fit within remaining tokens (if > 100 tokens left)
- Append truncation marker to indicate content was cut off
- Still add truncation message at end of all results

Benefits:
- Users get partial data instead of nothing
- Better debugging and analysis experience
- More useful for AI-powered code analysis tasks
- Consistent with expected behavior when limits are reached

Example: If file would use 10K tokens but only 2K remain, return
first ~8K chars of content + truncation marker instead of dropping it.

Signed-off-by: Wayne Sun <gsun@redhat.com>
2025-11-10 15:23:56 -08:00
Brendan Kellam
6f64d5bb8d
fix(worker): Run setInterval as blocking (#607)
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-09 14:49:24 -08:00
Brendan Kellam
1be6e8842e
fix(worker): properly shutdown PostHog client (#609) 2025-11-09 14:30:01 -08:00
Arman K.
f04ecab3ad
Update README.md (#608) 2025-11-09 13:28:28 -08:00
bkellam
d63da4b2c0 sourcebot v4.9.1
Some checks failed
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-11-07 10:39:57 -08:00
Brendan Kellam
825cef9da4
feat(deployment): Basic docker-compose file (#480) 2025-11-07 10:38:24 -08:00
Brendan Kellam
dd5cf61977
fix discord links (#606) 2025-11-07 10:05:05 -08:00
Furbreeze
5f5690ec49
adding contribution step for generating database schema (#602)
Some checks failed
Update Roadmap Released / update (push) Has been cancelled
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Has been cancelled
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Has been cancelled
Publish to ghcr / merge (push) Has been cancelled
2025-11-06 10:50:41 -08:00
bkellam
720f2e4f4b update changelog for https://github.com/sourcebot-dev/sourcebot/pull/599
Some checks are pending
Publish to ghcr / build (linux/amd64, blacksmith-4vcpu-ubuntu-2404) (push) Waiting to run
Publish to ghcr / build (linux/arm64, blacksmith-8vcpu-ubuntu-2204-arm) (push) Waiting to run
Publish to ghcr / merge (push) Blocked by required conditions
Update Roadmap Released / update (push) Waiting to run
2025-11-05 22:29:15 -08:00
Brendan Kellam
612ecff93a
feat: Support running Docker container as non-root (#599) 2025-11-05 22:24:46 -08:00
242 changed files with 10551 additions and 2829 deletions

View file

@ -6,8 +6,6 @@ DATABASE_URL="postgresql://postgres:postgres@localhost:5432/postgres"
ZOEKT_WEBSERVER_URL="http://localhost:6070" ZOEKT_WEBSERVER_URL="http://localhost:6070"
# The command to use for generating ctags. # The command to use for generating ctags.
CTAGS_COMMAND=ctags CTAGS_COMMAND=ctags
# logging, strict
SRC_TENANT_ENFORCEMENT_MODE=strict
# Auth.JS # Auth.JS
# You can generate a new secret with: # You can generate a new secret with:
@ -23,7 +21,7 @@ AUTH_URL="http://localhost:3000"
DATA_CACHE_DIR=${PWD}/.sourcebot # Path to the sourcebot cache dir (ex. ~/sourcebot/.sourcebot) DATA_CACHE_DIR=${PWD}/.sourcebot # Path to the sourcebot cache dir (ex. ~/sourcebot/.sourcebot)
SOURCEBOT_PUBLIC_KEY_PATH=${PWD}/public.pem SOURCEBOT_PUBLIC_KEY_PATH=${PWD}/public.pem
# CONFIG_PATH=${PWD}/config.json # Path to the sourcebot config file (if one exists) CONFIG_PATH=${PWD}/config.json # Path to the sourcebot config file (if one exists)
# Email # Email
# EMAIL_FROM_ADDRESS="" # The from address for transactional emails. # EMAIL_FROM_ADDRESS="" # The from address for transactional emails.
@ -31,7 +29,6 @@ SOURCEBOT_PUBLIC_KEY_PATH=${PWD}/public.pem
# PostHog # PostHog
# POSTHOG_PAPIK="" # POSTHOG_PAPIK=""
# NEXT_PUBLIC_POSTHOG_PAPIK=""
# Sentry # Sentry
# SENTRY_BACKEND_DSN="" # SENTRY_BACKEND_DSN=""

View file

@ -1,4 +1,4 @@
contact_links: contact_links:
- name: 👾 Discord - name: 👾 Discord
url: https://discord.gg/f4Cbf3HT url: https://discord.gg/HDScTs3ptP
about: Something else? Join the Discord! about: Something else? Join the Discord!

View file

@ -55,7 +55,6 @@ jobs:
${{ env.IMAGE_PATH }}:latest ${{ env.IMAGE_PATH }}:latest
build-args: | build-args: |
NEXT_PUBLIC_SOURCEBOT_VERSION=${{ github.ref_name }} NEXT_PUBLIC_SOURCEBOT_VERSION=${{ github.ref_name }}
NEXT_PUBLIC_POSTHOG_PAPIK=${{ vars.NEXT_PUBLIC_POSTHOG_PAPIK }}
NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT=${{ vars.NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT }} NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT=${{ vars.NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT }}
NEXT_PUBLIC_SENTRY_ENVIRONMENT=${{ vars.NEXT_PUBLIC_SENTRY_ENVIRONMENT }} NEXT_PUBLIC_SENTRY_ENVIRONMENT=${{ vars.NEXT_PUBLIC_SENTRY_ENVIRONMENT }}
NEXT_PUBLIC_SENTRY_WEBAPP_DSN=${{ vars.NEXT_PUBLIC_SENTRY_WEBAPP_DSN }} NEXT_PUBLIC_SENTRY_WEBAPP_DSN=${{ vars.NEXT_PUBLIC_SENTRY_WEBAPP_DSN }}

View file

@ -77,7 +77,6 @@ jobs:
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true,annotation.org.opencontainers.image.description=Blazingly fast code search outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true,annotation.org.opencontainers.image.description=Blazingly fast code search
build-args: | build-args: |
NEXT_PUBLIC_SOURCEBOT_VERSION=${{ github.ref_name }} NEXT_PUBLIC_SOURCEBOT_VERSION=${{ github.ref_name }}
NEXT_PUBLIC_POSTHOG_PAPIK=${{ vars.NEXT_PUBLIC_POSTHOG_PAPIK }}
- name: Export digest - name: Export digest
run: | run: |

View file

@ -7,9 +7,86 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] ## [Unreleased]
### Fixed
- Fixed review agent so that it works with GHES instances [#611](https://github.com/sourcebot-dev/sourcebot/pull/611)
### Added
- Added support for arbitrary user IDs required for OpenShift. [#658](https://github.com/sourcebot-dev/sourcebot/pull/658)
### Updated
- Improved error messages in file source api. [#665](https://github.com/sourcebot-dev/sourcebot/pull/665)
## [4.10.2] - 2025-12-04
### Fixed
- Fixed issue where the disable telemetry flag was not being respected for web server telemetry. [#657](https://github.com/sourcebot-dev/sourcebot/pull/657)
## [4.10.1] - 2025-12-03
### Added
- Added `ALWAYS_INDEX_FILE_PATTERNS` environment variable to allow specifying a comma-separated list of glob patterns matching file paths that should always be indexed, regardless of size or # of trigrams. [#631](https://github.com/sourcebot-dev/sourcebot/pull/631)
- Added button to explore menu to toggle cross-repository search. [#647](https://github.com/sourcebot-dev/sourcebot/pull/647)
- Added server side telemetry for search metrics. [#652](https://github.com/sourcebot-dev/sourcebot/pull/652)
### Fixed
- Fixed issue where single quotes could not be used in search queries. [#629](https://github.com/sourcebot-dev/sourcebot/pull/629)
- Fixed issue where files with special characters would fail to load. [#636](https://github.com/sourcebot-dev/sourcebot/issues/636)
- Fixed Ask performance issues. [#632](https://github.com/sourcebot-dev/sourcebot/pull/632)
- Fixed regression where creating a new Ask thread when unauthenticated would result in a 404. [#641](https://github.com/sourcebot-dev/sourcebot/pull/641)
- Updated react and next package versions to fix CVE 2025-55182. [#654](https://github.com/sourcebot-dev/sourcebot/pull/654)
### Changed
- Changed the default behaviour for code nav to scope references & definitions search to the current repository. [#647](https://github.com/sourcebot-dev/sourcebot/pull/647)
## [4.10.0] - 2025-11-24
### Added
- Added support for streaming code search results. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
- Added buttons to toggle case sensitivity and regex patterns. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
- Added counts to members, requests, and invites tabs in the members settings. [#621](https://github.com/sourcebot-dev/sourcebot/pull/621)
- [Sourcebot EE] Add support for Authentik as an identity provider. [#627](https://github.com/sourcebot-dev/sourcebot/pull/627)
### Changed
- Changed the default search behaviour to match patterns as substrings and **not** regular expressions. Regular expressions can be used by toggling the regex button in search bar. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
- Renamed `public` query prefix to `visibility`. Allowed values for `visibility` are `public`, `private`, and `any`. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
- Changed `archived` query prefix to accept values `yes`, `no`, and `only`. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
### Removed
- Removed `case` query prefix. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
- Removed `branch` and `b` query prefixes. Please use `rev:` instead. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
- Removed `regex` query prefix. [#623](https://github.com/sourcebot-dev/sourcebot/pull/623)
### Fixed
- Fixed spurious infinite loads with explore panel, file tree, and file search command. [#617](https://github.com/sourcebot-dev/sourcebot/pull/617)
- Wipe search context on init if entitlement no longer exists [#618](https://github.com/sourcebot-dev/sourcebot/pull/618)
- Fixed Bitbucket repository exclusions not supporting glob patterns. [#620](https://github.com/sourcebot-dev/sourcebot/pull/620)
- Fixed issue where the repo driven permission syncer was attempting to sync public repositories. [#624](https://github.com/sourcebot-dev/sourcebot/pull/624)
- Fixed issue where worker would not shutdown while a permission sync job (repo or user) was in progress. [#624](https://github.com/sourcebot-dev/sourcebot/pull/624)
## [4.9.2] - 2025-11-13
### Changed
- Bumped the default requested search result count from 5k to 10k after optimization pass. [#615](https://github.com/sourcebot-dev/sourcebot/pull/615)
### Fixed
- Fixed incorrect shutdown of PostHog SDK in the worker. [#609](https://github.com/sourcebot-dev/sourcebot/pull/609)
- Fixed race condition in job schedulers. [#607](https://github.com/sourcebot-dev/sourcebot/pull/607)
- Fixed connection sync jobs getting stuck in pending or in progress after restarting the worker. [#612](https://github.com/sourcebot-dev/sourcebot/pull/612)
- Fixed issue where connections would always sync on startup, regardless of whether they changed. [#613](https://github.com/sourcebot-dev/sourcebot/pull/613)
- Fixed performance bottleneck in search api. Result is an order of magnitude improvement to average search time according to benchmarks. [#615](https://github.com/sourcebot-dev/sourcebot/pull/615)
### Added
- Added force resync buttons for connections and repositories. [#610](https://github.com/sourcebot-dev/sourcebot/pull/610)
- Added environment variable to configure default search result count. [#616](https://github.com/sourcebot-dev/sourcebot/pull/616)
## [4.9.1] - 2025-11-07
### Added
- Added support for running Sourcebot as non-root user. [#599](https://github.com/sourcebot-dev/sourcebot/pull/599)
## [4.9.0] - 2025-11-04 ## [4.9.0] - 2025-11-04
## Added ### Added
- [Experimental][Sourcebot EE] Added GitLab permission syncing. [#585](https://github.com/sourcebot-dev/sourcebot/pull/585) - [Experimental][Sourcebot EE] Added GitLab permission syncing. [#585](https://github.com/sourcebot-dev/sourcebot/pull/585)
- [Sourcebot EE] Added external identity provider config and support for multiple accounts. [#595](https://github.com/sourcebot-dev/sourcebot/pull/595) - [Sourcebot EE] Added external identity provider config and support for multiple accounts. [#595](https://github.com/sourcebot-dev/sourcebot/pull/595)
- Added ability to configure environment variables from the config. [#597](https://github.com/sourcebot-dev/sourcebot/pull/597) - Added ability to configure environment variables from the config. [#597](https://github.com/sourcebot-dev/sourcebot/pull/597)

View file

@ -36,15 +36,20 @@
docker compose -f docker-compose-dev.yml up -d docker compose -f docker-compose-dev.yml up -d
``` ```
6. Create a copy of `.env.development` and name it `.env.development.local`. Update the required environment variables. 6. Generate the database schema.
```sh
yarn dev:prisma:migrate:dev
```
7. If you're using a declarative configuration file, create a configuration file and update the `CONFIG_PATH` environment variable in your `.env.development.local` file. 7. Create a copy of `.env.development` and name it `.env.development.local`. Update the required environment variables.
8. Start Sourcebot with the command: 8. If you're using a declarative configuration file, create a configuration file and update the `CONFIG_PATH` environment variable in your `.env.development.local` file.
9. Start Sourcebot with the command:
```sh ```sh
yarn dev yarn dev
``` ```
A `.sourcebot` directory will be created and zoekt will begin to index the repositories found in the `config.json` file. A `.sourcebot` directory will be created and zoekt will begin to index the repositories found in the `config.json` file.
9. Start searching at `http://localhost:3000`. 10. Start searching at `http://localhost:3000`.

View file

@ -1,3 +1,4 @@
# syntax=docker/dockerfile:1
# ------ Global scope variables ------ # ------ Global scope variables ------
# Set of global build arguments. # Set of global build arguments.
@ -8,11 +9,6 @@
# @see: https://docs.docker.com/build/building/variables/#scoping # @see: https://docs.docker.com/build/building/variables/#scoping
ARG NEXT_PUBLIC_SOURCEBOT_VERSION ARG NEXT_PUBLIC_SOURCEBOT_VERSION
# PAPIK = Project API Key
# Note that this key does not need to be kept secret, so it's not
# necessary to use Docker build secrets here.
# @see: https://posthog.com/tutorials/api-capture-events#authenticating-with-the-project-api-key
ARG NEXT_PUBLIC_POSTHOG_PAPIK
ARG NEXT_PUBLIC_SENTRY_ENVIRONMENT ARG NEXT_PUBLIC_SENTRY_ENVIRONMENT
ARG NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT ARG NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT
ARG NEXT_PUBLIC_SENTRY_WEBAPP_DSN ARG NEXT_PUBLIC_SENTRY_WEBAPP_DSN
@ -43,10 +39,12 @@ COPY .yarn ./.yarn
COPY ./packages/db ./packages/db COPY ./packages/db ./packages/db
COPY ./packages/schemas ./packages/schemas COPY ./packages/schemas ./packages/schemas
COPY ./packages/shared ./packages/shared COPY ./packages/shared ./packages/shared
COPY ./packages/queryLanguage ./packages/queryLanguage
RUN yarn workspace @sourcebot/db install RUN yarn workspace @sourcebot/db install
RUN yarn workspace @sourcebot/schemas install RUN yarn workspace @sourcebot/schemas install
RUN yarn workspace @sourcebot/shared install RUN yarn workspace @sourcebot/shared install
RUN yarn workspace @sourcebot/query-language install
# ------------------------------------ # ------------------------------------
# ------ Build Web ------ # ------ Build Web ------
@ -55,8 +53,6 @@ ENV SKIP_ENV_VALIDATION=1
# ----------- # -----------
ARG NEXT_PUBLIC_SOURCEBOT_VERSION ARG NEXT_PUBLIC_SOURCEBOT_VERSION
ENV NEXT_PUBLIC_SOURCEBOT_VERSION=$NEXT_PUBLIC_SOURCEBOT_VERSION ENV NEXT_PUBLIC_SOURCEBOT_VERSION=$NEXT_PUBLIC_SOURCEBOT_VERSION
ARG NEXT_PUBLIC_POSTHOG_PAPIK
ENV NEXT_PUBLIC_POSTHOG_PAPIK=$NEXT_PUBLIC_POSTHOG_PAPIK
ARG NEXT_PUBLIC_SENTRY_ENVIRONMENT ARG NEXT_PUBLIC_SENTRY_ENVIRONMENT
ENV NEXT_PUBLIC_SENTRY_ENVIRONMENT=$NEXT_PUBLIC_SENTRY_ENVIRONMENT ENV NEXT_PUBLIC_SENTRY_ENVIRONMENT=$NEXT_PUBLIC_SENTRY_ENVIRONMENT
ARG NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT ARG NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT
@ -92,6 +88,7 @@ COPY --from=shared-libs-builder /app/node_modules ./node_modules
COPY --from=shared-libs-builder /app/packages/db ./packages/db COPY --from=shared-libs-builder /app/packages/db ./packages/db
COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas
COPY --from=shared-libs-builder /app/packages/shared ./packages/shared COPY --from=shared-libs-builder /app/packages/shared ./packages/shared
COPY --from=shared-libs-builder /app/packages/queryLanguage ./packages/queryLanguage
# Fixes arm64 timeouts # Fixes arm64 timeouts
RUN yarn workspace @sourcebot/web install RUN yarn workspace @sourcebot/web install
@ -130,6 +127,7 @@ COPY --from=shared-libs-builder /app/node_modules ./node_modules
COPY --from=shared-libs-builder /app/packages/db ./packages/db COPY --from=shared-libs-builder /app/packages/db ./packages/db
COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas
COPY --from=shared-libs-builder /app/packages/shared ./packages/shared COPY --from=shared-libs-builder /app/packages/shared ./packages/shared
COPY --from=shared-libs-builder /app/packages/queryLanguage ./packages/queryLanguage
RUN yarn workspace @sourcebot/backend install RUN yarn workspace @sourcebot/backend install
RUN yarn workspace @sourcebot/backend build RUN yarn workspace @sourcebot/backend build
@ -150,8 +148,6 @@ FROM node-alpine AS runner
# ----------- # -----------
ARG NEXT_PUBLIC_SOURCEBOT_VERSION ARG NEXT_PUBLIC_SOURCEBOT_VERSION
ENV NEXT_PUBLIC_SOURCEBOT_VERSION=$NEXT_PUBLIC_SOURCEBOT_VERSION ENV NEXT_PUBLIC_SOURCEBOT_VERSION=$NEXT_PUBLIC_SOURCEBOT_VERSION
ARG NEXT_PUBLIC_POSTHOG_PAPIK
ENV NEXT_PUBLIC_POSTHOG_PAPIK=$NEXT_PUBLIC_POSTHOG_PAPIK
ARG NEXT_PUBLIC_SENTRY_ENVIRONMENT ARG NEXT_PUBLIC_SENTRY_ENVIRONMENT
ENV NEXT_PUBLIC_SENTRY_ENVIRONMENT=$NEXT_PUBLIC_SENTRY_ENVIRONMENT ENV NEXT_PUBLIC_SENTRY_ENVIRONMENT=$NEXT_PUBLIC_SENTRY_ENVIRONMENT
ARG NEXT_PUBLIC_SENTRY_WEBAPP_DSN ARG NEXT_PUBLIC_SENTRY_WEBAPP_DSN
@ -173,8 +169,13 @@ ENV DATA_DIR=/data
ENV DATA_CACHE_DIR=$DATA_DIR/.sourcebot ENV DATA_CACHE_DIR=$DATA_DIR/.sourcebot
ENV DATABASE_DATA_DIR=$DATA_CACHE_DIR/db ENV DATABASE_DATA_DIR=$DATA_CACHE_DIR/db
ENV REDIS_DATA_DIR=$DATA_CACHE_DIR/redis ENV REDIS_DATA_DIR=$DATA_CACHE_DIR/redis
ENV SRC_TENANT_ENFORCEMENT_MODE=strict
ENV SOURCEBOT_PUBLIC_KEY_PATH=/app/public.pem ENV SOURCEBOT_PUBLIC_KEY_PATH=/app/public.pem
# PAPIK = Project API Key
# Note that this key does not need to be kept secret, so it's not
# necessary to use Docker build secrets here.
# @see: https://posthog.com/tutorials/api-capture-events#authenticating-with-the-project-api-key
# @note: this is also declared in the shared env.server.ts file.
ENV POSTHOG_PAPIK=phc_lLPuFFi5LH6c94eFJcqvYVFwiJffVcV6HD8U4a1OnRW
# Valid values are: debug, info, warn, error # Valid values are: debug, info, warn, error
ENV SOURCEBOT_LOG_LEVEL=info ENV SOURCEBOT_LOG_LEVEL=info
@ -182,6 +183,23 @@ ENV SOURCEBOT_LOG_LEVEL=info
# Sourcebot collects anonymous usage data using [PostHog](https://posthog.com/). Uncomment this line to disable. # Sourcebot collects anonymous usage data using [PostHog](https://posthog.com/). Uncomment this line to disable.
# ENV SOURCEBOT_TELEMETRY_DISABLED=1 # ENV SOURCEBOT_TELEMETRY_DISABLED=1
# Configure dependencies
RUN apk add --no-cache git ca-certificates bind-tools tini jansson wget supervisor uuidgen curl perl jq redis postgresql postgresql-contrib openssl util-linux unzip
ARG UID=1500
ARG GID=1500
# Always create the non-root user to support runtime user switching
# The container can be run as root (default) or as sourcebot user using docker run --user
RUN addgroup -g $GID sourcebot && \
adduser -D -u $UID -h /app -S sourcebot && \
adduser sourcebot postgres && \
adduser sourcebot redis && \
chown -R sourcebot /app && \
adduser sourcebot node && \
mkdir /var/log/sourcebot && \
chown sourcebot /var/log/sourcebot
COPY package.json yarn.lock* .yarnrc.yml public.pem ./ COPY package.json yarn.lock* .yarnrc.yml public.pem ./
COPY .yarn ./.yarn COPY .yarn ./.yarn
@ -201,21 +219,23 @@ COPY --from=zoekt-builder \
/cmd/zoekt-index \ /cmd/zoekt-index \
/usr/local/bin/ /usr/local/bin/
RUN chown -R sourcebot:sourcebot /app
# Copy zoekt proto files (needed for gRPC client at runtime)
COPY --chown=sourcebot:sourcebot vendor/zoekt/grpc/protos /app/vendor/zoekt/grpc/protos
# Copy all of the things # Copy all of the things
COPY --from=web-builder /app/packages/web/public ./packages/web/public COPY --chown=sourcebot:sourcebot --from=web-builder /app/packages/web/public ./packages/web/public
COPY --from=web-builder /app/packages/web/.next/standalone ./ COPY --chown=sourcebot:sourcebot --from=web-builder /app/packages/web/.next/standalone ./
COPY --from=web-builder /app/packages/web/.next/static ./packages/web/.next/static COPY --chown=sourcebot:sourcebot --from=web-builder /app/packages/web/.next/static ./packages/web/.next/static
COPY --from=backend-builder /app/node_modules ./node_modules COPY --chown=sourcebot:sourcebot --from=backend-builder /app/node_modules ./node_modules
COPY --from=backend-builder /app/packages/backend ./packages/backend COPY --chown=sourcebot:sourcebot --from=backend-builder /app/packages/backend ./packages/backend
COPY --from=shared-libs-builder /app/node_modules ./node_modules COPY --chown=sourcebot:sourcebot --from=shared-libs-builder /app/packages/db ./packages/db
COPY --from=shared-libs-builder /app/packages/db ./packages/db COPY --chown=sourcebot:sourcebot --from=shared-libs-builder /app/packages/schemas ./packages/schemas
COPY --from=shared-libs-builder /app/packages/schemas ./packages/schemas COPY --chown=sourcebot:sourcebot --from=shared-libs-builder /app/packages/shared ./packages/shared
COPY --from=shared-libs-builder /app/packages/shared ./packages/shared COPY --chown=sourcebot:sourcebot --from=shared-libs-builder /app/packages/queryLanguage ./packages/queryLanguage
# Configure dependencies
RUN apk add --no-cache git ca-certificates bind-tools tini jansson wget supervisor uuidgen curl perl jq redis postgresql postgresql-contrib openssl util-linux unzip
# Fixes git "dubious ownership" issues when the volume is mounted with different permissions to the container. # Fixes git "dubious ownership" issues when the volume is mounted with different permissions to the container.
RUN git config --global safe.directory "*" RUN git config --global safe.directory "*"
@ -225,12 +245,23 @@ RUN mkdir -p /run/postgresql && \
chown -R postgres:postgres /run/postgresql && \ chown -R postgres:postgres /run/postgresql && \
chmod 775 /run/postgresql chmod 775 /run/postgresql
# Make app directory accessible to both root and sourcebot user
RUN chown -R sourcebot /app \
&& chgrp -R 0 /app \
&& chmod -R g=u /app
# Make data directory accessible to both root and sourcebot user
RUN chown -R sourcebot /data
COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
COPY prefix-output.sh ./prefix-output.sh COPY prefix-output.sh ./prefix-output.sh
RUN chmod +x ./prefix-output.sh RUN chmod +x ./prefix-output.sh
COPY entrypoint.sh ./entrypoint.sh COPY entrypoint.sh ./entrypoint.sh
RUN chmod +x ./entrypoint.sh RUN chmod +x ./entrypoint.sh
# Note: for back-compat cases, we do _not_ set the USER directive here.
# Instead, the user can be overridden at runtime with --user flag.
# USER sourcebot
EXPOSE 3000 EXPOSE 3000
ENV PORT=3000 ENV PORT=3000
ENV HOSTNAME="0.0.0.0" ENV HOSTNAME="0.0.0.0"

View file

@ -72,15 +72,22 @@ https://github.com/user-attachments/assets/31ec0669-707d-4e03-b511-1bc33d44197a
# Deploy Sourcebot # Deploy Sourcebot
Sourcebot can be deployed in seconds using our official docker image. Visit our [docs](https://docs.sourcebot.dev/docs/deployment-guide) for more information. Sourcebot can be deployed in seconds using Docker Compose. Visit our [docs](https://docs.sourcebot.dev/docs/deployment/docker-compose) for more information.
1. Create a config 1. Download the docker-compose.yml file
```sh
curl -o docker-compose.yml https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/docker-compose.yml
```
2. In the same directory as the `docker-compose.yml` file, create a [configuration file](https://docs.sourcebot.dev/docs/configuration/config-file). The configuration file is a JSON file that configures Sourcebot's behaviour, including what repositories to index, language model providers, auth providers, and more.
```sh ```sh
touch config.json touch config.json
echo '{ echo '{
"$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json", "$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
// Comments are supported.
// This config creates a single connection to GitHub.com that
// indexes the Sourcebot repository
"connections": { "connections": {
// Comments are supported
"starter-connection": { "starter-connection": {
"type": "github", "type": "github",
"repos": [ "repos": [
@ -91,37 +98,19 @@ echo '{
}' > config.json }' > config.json
``` ```
2. Run the docker container 3. Update the secrets in the `docker-compose.yml` and then run Sourcebot using:
```sh ```sh
docker run \ docker compose up
-p 3000:3000 \
--pull=always \
--rm \
-v $(pwd):/data \
-e CONFIG_PATH=/data/config.json \
--name sourcebot \
ghcr.io/sourcebot-dev/sourcebot:latest
``` ```
<details>
<summary>What does this command do?</summary>
- Pull and run the Sourcebot docker image from [ghcr.io/sourcebot-dev/sourcebot:latest](https://github.com/sourcebot-dev/sourcebot/pkgs/container/sourcebot). 4. Visit `http://localhost:3000` to start using Sourcebot
- Mount the current directory (`-v $(pwd):/data`) to allow Sourcebot to persist the `.sourcebot` cache.
- Clones sourcebot at `HEAD` into `.sourcebot/github/sourcebot-dev/sourcebot`.
- Indexes sourcebot into a .zoekt index file in `.sourcebot/index/`.
- Map port 3000 between your machine and the docker image.
- Starts the web server on port 3000.
</details>
</br>
3. Visit `http://localhost:3000` to start using Sourcebot
</br> </br>
To configure Sourcebot (index your own repos, connect your LLMs, etc), check out our [docs](https://docs.sourcebot.dev/docs/configuration/config-file). To configure Sourcebot (index your own repos, connect your LLMs, etc), check out our [docs](https://docs.sourcebot.dev/docs/configuration/config-file).
> [!NOTE] > [!NOTE]
> Sourcebot collects <a href="https://demo.sourcebot.dev/~/search?query=captureEvent%5C(%20repo%3Asourcebot">anonymous usage data</a> by default to help us improve the product. No sensitive data is collected, but if you'd like to disable this you can do so by setting the `SOURCEBOT_TELEMETRY_DISABLED` environment > Sourcebot collects <a href="https://demo.sourcebot.dev/~/search?query=captureEvent%5C(%20repo%3Asourcebot">anonymous usage data</a> by default to help us improve the product. No sensitive data is collected, but if you'd like to disable this you can do so by setting the `SOURCEBOT_TELEMETRY_DISABLED` environment
> variable to `true`. Please refer to our [telemetry docs](https://docs.sourcebot.dev/self-hosting/overview#telemetry) for more information. > variable to `true`. Please refer to our [telemetry docs](https://docs.sourcebot.dev/docs/overview#telemetry) for more information.
# Build from source # Build from source
>[!NOTE] >[!NOTE]

66
docker-compose.yml Normal file
View file

@ -0,0 +1,66 @@
services:
sourcebot:
image: ghcr.io/sourcebot-dev/sourcebot:latest
user: sourcebot
restart: always
container_name: sourcebot
depends_on:
postgres:
condition: service_healthy
redis:
condition: service_healthy
ports:
- "3000:3000"
volumes:
- ./config.json:/data/config.json
- sourcebot_data:/data
environment:
- CONFIG_PATH=/data/config.json
- AUTH_URL=${AUTH_URL:-http://localhost:3000}
- AUTH_SECRET=${AUTH_SECRET:-000000000000000000000000000000000} # CHANGEME: generate via `openssl rand -base64 33`
- SOURCEBOT_ENCRYPTION_KEY=${SOURCEBOT_ENCRYPTION_KEY:-000000000000000000000000000000000} # CHANGEME: generate via `openssl rand -base64 24`
- DATABASE_URL=${DATABASE_URL:-postgresql://postgres:postgres@postgres:5432/postgres} # CHANGEME
- REDIS_URL=${REDIS_URL:-redis://redis:6379} # CHANGEME
- SOURCEBOT_EE_LICENSE_KEY=${SOURCEBOT_EE_LICENSE_KEY:-}
- SOURCEBOT_TELEMETRY_DISABLED=${SOURCEBOT_TELEMETRY_DISABLED:-false}
# For the full list of environment variables see:
# https://docs.sourcebot.dev/docs/configuration/environment-variables
postgres:
image: docker.io/postgres:${POSTGRES_VERSION:-latest}
restart: always
healthcheck:
test: ["CMD-SHELL", "pg_isready -U postgres"]
interval: 3s
timeout: 3s
retries: 10
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres # CHANGEME
POSTGRES_DB: postgres
ports:
- 127.0.0.1:5432:5432
volumes:
- sourcebot_postgres_data:/var/lib/postgresql/data
redis:
image: docker.io/redis:${REDIS_VERSION:-latest}
restart: always
ports:
- 127.0.0.1:6379:6379
healthcheck:
test: ["CMD", "redis-cli", "ping"]
interval: 3s
timeout: 10s
retries: 10
volumes:
- sourcebot_redis_data:/data
volumes:
sourcebot_data:
driver: local
sourcebot_postgres_data:
driver: local
sourcebot_redis_data:
driver: local

View file

@ -21,7 +21,13 @@
"group": "Getting Started", "group": "Getting Started",
"pages": [ "pages": [
"docs/overview", "docs/overview",
"docs/deployment-guide" {
"group": "Deployment",
"pages": [
"docs/deployment/docker-compose",
"docs/deployment/k8s"
]
}
] ]
}, },
{ {
@ -138,7 +144,7 @@
"socials": { "socials": {
"github": "https://github.com/sourcebot-dev/sourcebot", "github": "https://github.com/sourcebot-dev/sourcebot",
"twitter": "https://x.com/sourcebot_dev", "twitter": "https://x.com/sourcebot_dev",
"discord": "https://discord.gg/Y6b78RqM", "discord": "https://discord.gg/HDScTs3ptP",
"linkedin": "https://www.linkedin.com/company/sourcebot" "linkedin": "https://www.linkedin.com/company/sourcebot"
} }
}, },

View file

@ -25,4 +25,4 @@ Sourcebot's built-in authentication system gates your deployment, and allows adm
# Troubleshooting # Troubleshooting
- If you experience issues logging in, logging out, or accessing an organization you should have access to, try clearing your cookies & performing a full page refresh (`Cmd/Ctrl + Shift + R` on most browsers). - If you experience issues logging in, logging out, or accessing an organization you should have access to, try clearing your cookies & performing a full page refresh (`Cmd/Ctrl + Shift + R` on most browsers).
- Still not working? Reach out to us on our [discord](https://discord.com/invite/6Fhp27x7Pb) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose) - Still not working? Reach out to us on our [discord](https://discord.gg/HDScTs3ptP) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose)

View file

@ -3,7 +3,7 @@ title: Environment variables
sidebarTitle: Environment variables sidebarTitle: Environment variables
--- ---
<Note>This page provides a detailed reference of all environment variables supported by Sourcebot. If you're just looking to get up and running, we recommend starting with the [deployment guide](/docs/deployment-guide) instead.</Note> <Note>This page provides a detailed reference of all environment variables supported by Sourcebot. If you're just looking to get up and running, we recommend starting with the [deployment guides](/docs/deployment/docker-compose) instead.</Note>
### Core Environment Variables ### Core Environment Variables
The following environment variables allow you to configure your Sourcebot deployment. The following environment variables allow you to configure your Sourcebot deployment.
@ -34,6 +34,8 @@ The following environment variables allow you to configure your Sourcebot deploy
| `SOURCEBOT_STRUCTURED_LOGGING_ENABLED` | `false` | <p>Enables/disable structured JSON logging. See [this doc](/docs/configuration/structured-logging) for more info.</p> | | `SOURCEBOT_STRUCTURED_LOGGING_ENABLED` | `false` | <p>Enables/disable structured JSON logging. See [this doc](/docs/configuration/structured-logging) for more info.</p> |
| `SOURCEBOT_STRUCTURED_LOGGING_FILE` | - | <p>Optional file to log to if structured logging is enabled</p> | | `SOURCEBOT_STRUCTURED_LOGGING_FILE` | - | <p>Optional file to log to if structured logging is enabled</p> |
| `SOURCEBOT_TELEMETRY_DISABLED` | `false` | <p>Enables/disables telemetry collection in Sourcebot. See [this doc](/docs/overview.mdx#telemetry) for more info.</p> | | `SOURCEBOT_TELEMETRY_DISABLED` | `false` | <p>Enables/disables telemetry collection in Sourcebot. See [this doc](/docs/overview.mdx#telemetry) for more info.</p> |
| `DEFAULT_MAX_MATCH_COUNT` | `10000` | <p>The default maximum number of search results to return when using search in the web app.</p> |
| `ALWAYS_INDEX_FILE_PATTERNS` | - | <p>A comma separated list of glob patterns matching file paths that should always be indexed, regardless of size or number of trigrams.</p> |
### Enterprise Environment Variables ### Enterprise Environment Variables
| Variable | Default | Description | | Variable | Default | Description |

View file

@ -366,3 +366,53 @@ A Microsoft Entra ID connection can be used for [authentication](/docs/configura
</Steps> </Steps>
</Accordion> </Accordion>
### Authentik
[Auth.js Authentik Provider Docs](https://authjs.dev/getting-started/providers/authentik)
An Authentik connection can be used for [authentication](/docs/configuration/auth).
<Accordion title="instructions">
<Steps>
<Step title="Create a OAuth2/OpenID Connect application">
To begin, you must create a OAuth2/OpenID Connect application in Authentik. For more information, see the [Authentik documentation](https://docs.goauthentik.io/add-secure-apps/applications/manage_apps/#create-an-application-and-provider-pair).
When configuring your application:
- Set the provider type to "OAuth2/OpenID Connect"
- Set the client type to "Confidential"
- Add `<sourcebot_url>/api/auth/callback/authentik` to the redirect URIs (ex. https://sourcebot.coolcorp.com/api/auth/callback/authentik)
After creating the application, open the application details to obtain the client id, client secret, and issuer URL (typically in the format `https://<authentik-domain>/application/o/<provider-slug>/`).
</Step>
<Step title="Define environment variables">
The client id, secret, and issuer URL are provided to Sourcebot via environment variables. These can be named whatever you like
(ex. `AUTHENTIK_IDENTITY_PROVIDER_CLIENT_ID`, `AUTHENTIK_IDENTITY_PROVIDER_CLIENT_SECRET`, and `AUTHENTIK_IDENTITY_PROVIDER_ISSUER`)
</Step>
<Step title="Define the identity provider config">
Create a `identityProvider` object in the [config file](/docs/configuration/config-file) with the following fields:
```json wrap icon="code"
{
"$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
"identityProviders": [
{
"provider": "authentik",
"purpose": "sso",
"clientId": {
"env": "AUTHENTIK_IDENTITY_PROVIDER_CLIENT_ID"
},
"clientSecret": {
"env": "AUTHENTIK_IDENTITY_PROVIDER_CLIENT_SECRET"
},
"issuer": {
"env": "AUTHENTIK_IDENTITY_PROVIDER_ISSUER"
}
}
]
}
```
</Step>
</Steps>
</Accordion>

View file

@ -69,6 +69,26 @@ To learn more about how to create a connection for a specific code host, check o
<Note>Missing your code host? [Submit a feature request on GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new?template=feature_request.md).</Note> <Note>Missing your code host? [Submit a feature request on GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new?template=feature_request.md).</Note>
## Indexing Large Files
By default, Sourcebot will skip indexing files that are larger than 2MB or have more than 20,000 trigrams. You can configure this by setting the `maxFileSize` and `maxTrigramCount` [settings](/docs/configuration/config-file#settings).
These limits can be ignored for specific files by passing in a comma separated list of glob patterns matching file paths to the `ALWAYS_INDEX_FILE_PATTERNS` environment variable. For example:
```bash
# Always index all .sum and .lock files
ALWAYS_INDEX_FILE_PATTERNS=**/*.sum,**/*.lock
```
Files that have been skipped are assigned the `skipped` language. You can view a list of all skipped files by using the following query:
```
lang:skipped
```
## Indexing Binary Files
Binary files cannot be indexed by Sourcebot. See [#575](https://github.com/sourcebot-dev/sourcebot/issues/575) for more information.
## Schema reference ## Schema reference
--- ---

View file

@ -1,88 +0,0 @@
---
title: "Deployment guide"
---
import SupportedPlatforms from '/snippets/platform-support.mdx'
The following guide will walk you through the steps to deploy Sourcebot on your own infrastructure. Sourcebot is distributed as a [single docker container](/docs/overview#architecture) that can be deployed to a k8s cluster, a VM, or any platform that supports docker.
<Note>Hit an issue? Please let us know on [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose) or by [emailing us](mailto:team@sourcebot.dev).</Note>
<Steps>
<Step title="Requirements">
- Docker -> use [Docker Desktop](https://www.docker.com/products/docker-desktop/) on Mac or Windows.
</Step>
<Step title="Create a config.json">
Create a `config.json` file that tells Sourcebot which repositories to sync and index:
```bash wrap icon="terminal" Create example config
touch config.json
echo '{
"$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
"connections": {
// comments are supported
"starter-connection": {
"type": "github",
"repos": [
"sourcebot-dev/sourcebot"
]
}
}
}' > config.json
```
This config creates a single GitHub connection named `starter-connection` that specifies [Sourcebot](https://github.com/sourcebot-dev/sourcebot) as a repo to sync. [Learn more about the config file](/docs/configuration/config-file).
</Step>
<Step title="Launch your instance">
<Warning>If you're deploying Sourcebot behind a domain, you must set the [AUTH_URL](/docs/configuration/environment-variables) environment variable.</Warning>
In the same directory as `config.json`, run the following command to start your instance:
``` bash icon="terminal" Start the Sourcebot container
docker run \
-p 3000:3000 \
--pull=always \
--rm \
-v $(pwd):/data \
-e CONFIG_PATH=/data/config.json \
--name sourcebot \
ghcr.io/sourcebot-dev/sourcebot:latest
```
<Accordion title="Details">
**This command**:
- pulls the latest version of the `sourcebot` docker image.
- mounts the working directory to `/data` in the container to allow Sourcebot to persist data across restarts, and to access the `config.json`. In your local directory, you should see a `.sourcebot` folder created that contains all persistent data.
- runs any pending database migrations.
- starts up all services, including the webserver exposed on port 3000.
- reads `config.json` and starts syncing.
</Accordion>
</Step>
<Step title="Complete onboarding">
Navigate to `http://localhost:3000` and complete the onboarding flow.
</Step>
<Step title="Done">
You're all set! If you'd like to setup [Ask Sourcebot](/docs/features/ask/overview), configure a language model [provider](/docs/configuration/language-model-providers).
</Step>
</Steps>
## Next steps
---
<CardGroup cols={3}>
<Card title="Index your code" icon="code" href="/docs/connections/overview">
Learn how to index your code using Sourcebot
</Card>
<Card title="Language models" icon="brain" href="/docs/configuration/language-model-providers">
Learn how to configure language model providers to start using [Ask Sourcebot](/docs/features/ask/overview)
</Card>
<Card title="Authentication" icon="lock" href="/docs/configuration/auth/overview">
Learn more about how to setup SSO, email codes, and other authentication providers.
</Card>
</CardGroup>

View file

@ -0,0 +1,61 @@
---
title: "Docker Compose"
---
This guide will walk you through deploying Sourcebot locally or on a VM using Docker Compose. We will use the [docker-compose.yml](https://github.com/sourcebot-dev/sourcebot/blob/main/docker-compose.yml) file from the Sourcebot repository. This is the simplest way to get started with Sourcebot.
If you are looking to deploy onto Kubernetes, see the [Kubernetes (Helm)](/docs/deployment/k8s) guide.
## Get started
<Steps>
<Step title="Requirements">
- docker & docker compose. Use [Docker Desktop](https://www.docker.com/products/docker-desktop/) on Mac or Windows.
</Step>
<Step title="Obtain the Docker Compose file">
Download the [docker-compose.yml](https://github.com/sourcebot-dev/sourcebot/blob/main/docker-compose.yml) file from the Sourcebot repository.
```bash wrap icon="terminal"
curl -o docker-compose.yml https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/docker-compose.yml
```
</Step>
<Step title="Create a config.json">
In the same directory as the `docker-compose.yml` file, create a [configuration file](/docs/configuration/config-file). The configuration file is a JSON file that configures Sourcebot's behaviour, including what repositories to index, language model providers, auth providers, and more.
```bash wrap icon="terminal" Create example config
touch config.json
echo '{
"$schema": "https://raw.githubusercontent.com/sourcebot-dev/sourcebot/main/schemas/v3/index.json",
// Comments are supported.
// This config creates a single connection to GitHub.com that
// indexes the Sourcebot repository
"connections": {
"starter-connection": {
"type": "github",
"repos": [
"sourcebot-dev/sourcebot"
]
}
}
}' > config.json
```
</Step>
<Step title="Launch your instance">
Update the secrets in the `docker-compose.yml` and then run Sourcebot using:
```bash wrap icon="terminal"
docker compose up
```
</Step>
<Step title="Done">
You're all set! Navigate to [http://localhost:3000](http://localhost:3000) to access your Sourcebot instance.
</Step>
</Steps>
## Next steps

View file

@ -0,0 +1,4 @@
---
title: "Kubernetes (Helm)"
url: https://github.com/sourcebot-dev/sourcebot-helm-chart
---

View file

@ -10,7 +10,7 @@ codebase that the agent may fetch to perform the review.
This agent provides codebase-aware reviews for your PRs. For each diff, this agent fetches relevant context from Sourcebot and feeds it into an LLM for a detailed review of your changes. This agent provides codebase-aware reviews for your PRs. For each diff, this agent fetches relevant context from Sourcebot and feeds it into an LLM for a detailed review of your changes.
The AI Code Review Agent is [fair source](https://github.com/sourcebot-dev/sourcebot/tree/main/packages/web/src/features/agents/review-agent) and packaged in [Sourcebot](https://github.com/sourcebot-dev/sourcebot). To get started using this agent, [deploy Sourcebot](/docs/deployment-guide) The AI Code Review Agent is [fair source](https://github.com/sourcebot-dev/sourcebot/tree/main/packages/web/src/features/agents/review-agent) and packaged in [Sourcebot](https://github.com/sourcebot-dev/sourcebot). To get started using this agent, [deploy Sourcebot](/docs/deployment/docker-compose)
and then follow the configuration instructions below. and then follow the configuration instructions below.
![AI Code Review Agent Example](/images/review_agent_example.png) ![AI Code Review Agent Example](/images/review_agent_example.png)

View file

@ -14,7 +14,7 @@ follow code nav references, and provide an answer thats rich with inline cita
<Card title="Index repos" icon="book" href="/docs/connections/overview" horizontal="true"> <Card title="Index repos" icon="book" href="/docs/connections/overview" horizontal="true">
Learn how to index your repos so you can ask questions about them Learn how to index your repos so you can ask questions about them
</Card> </Card>
<Card title="Deployment guide" icon="server" href="/docs/deployment-guide" horizontal="true"> <Card title="Deployment guide" icon="server" href="/docs/deployment/docker-compose" horizontal="true">
Learn how to self-host Sourcebot in a few simple steps. Learn how to self-host Sourcebot in a few simple steps.
</Card> </Card>
<Card title="Public demo" icon="globe" href="https://demo.sourcebot.dev/" horizontal="true"> <Card title="Public demo" icon="globe" href="https://demo.sourcebot.dev/" horizontal="true">

View file

@ -21,6 +21,7 @@ import LicenseKeyRequired from '/snippets/license-key-required.mdx'
| **Go to definition** | Clicking the "go to definition" button in the popover or clicking the symbol name navigates to the symbol's definition. | | **Go to definition** | Clicking the "go to definition" button in the popover or clicking the symbol name navigates to the symbol's definition. |
| **Find references** | Clicking the "find all references" button in the popover lists all references in the explore panel. | | **Find references** | Clicking the "find all references" button in the popover lists all references in the explore panel. |
| **Explore panel** | Lists all references and definitions for the symbol selected in the popover. | | **Explore panel** | Lists all references and definitions for the symbol selected in the popover. |
| **Cross-repository navigation** | You can search across all repositories by clicking the globe icon in the explore panel. By default, references and definitions are scoped to the repository where the symbol is being resolved. |
## How does it work? ## How does it work?

View file

@ -9,7 +9,7 @@ The [Model Context Protocol](https://modelcontextprotocol.io/introduction) (MCP)
<Steps> <Steps>
<Step title="Launch Sourcebot"> <Step title="Launch Sourcebot">
Follow the [deployment guide](/docs/deployment-guide) to launch Sourcebot and get your code indexed. The host url of your instance (e.g., `http://localhost:3000`) is passed to the MCP server via the `SOURCEBOT_HOST` url. Follow the [deployment guides](/docs/deployment/docker-compose) to launch Sourcebot and get your code indexed. The host url of your instance (e.g., `http://localhost:3000`) is passed to the MCP server via the `SOURCEBOT_HOST` url.
If a host is not provided, then the server will fallback to using the demo instance hosted at https://demo.sourcebot.dev. You can see the list of repositories indexed [here](https://demo.sourcebot.dev/~/repos). Add additional repositories by [opening a PR](https://github.com/sourcebot-dev/sourcebot/blob/main/demo-site-config.json). If a host is not provided, then the server will fallback to using the demo instance hosted at https://demo.sourcebot.dev. You can see the list of repositories indexed [here](https://demo.sourcebot.dev/~/repos). Add additional repositories by [opening a PR](https://github.com/sourcebot-dev/sourcebot/blob/main/demo-site-config.json).
</Step> </Step>

View file

@ -1,14 +1,11 @@
--- ---
title: "Permission syncing" title: "Permission syncing"
sidebarTitle: "Permission syncing" sidebarTitle: "Permission syncing"
tag: "experimental"
--- ---
import LicenseKeyRequired from '/snippets/license-key-required.mdx' import LicenseKeyRequired from '/snippets/license-key-required.mdx'
import ExperimentalFeatureWarning from '/snippets/experimental-feature-warning.mdx'
<LicenseKeyRequired /> <LicenseKeyRequired />
<ExperimentalFeatureWarning />
# Overview # Overview

View file

@ -22,7 +22,7 @@ Search across all your repos/branches across any code host platform. Blazingly f
<Card title="Branches" icon="split" href="/docs/features/search/multi-branch-indexing" horizontal="true"> <Card title="Branches" icon="split" href="/docs/features/search/multi-branch-indexing" horizontal="true">
Learn how to index and search through your branches Learn how to index and search through your branches
</Card> </Card>
<Card title="Deployment guide" icon="server" href="/docs/deployment-guide" horizontal="true"> <Card title="Deployment guides" icon="server" href="/docs/deployment/docker-compose" horizontal="true">
Learn how to self-host Sourcebot in a few simple steps. Learn how to self-host Sourcebot in a few simple steps.
</Card> </Card>
<Card title="Public demo" icon="globe" href="https://demo.sourcebot.dev/" horizontal="true"> <Card title="Public demo" icon="globe" href="https://demo.sourcebot.dev/" horizontal="true">

View file

@ -4,32 +4,51 @@ title: Writing search queries
Sourcebot uses a powerful regex-based query language that enables precise code search within large codebases. Sourcebot uses a powerful regex-based query language that enables precise code search within large codebases.
## Syntax reference guide ## Syntax reference guide
Queries consist of space-separated regular expressions. Wrapping expressions in `""` combines them. By default, a file must have at least one match for each expression to be included. Queries consist of space-separated search patterns that are matched against file contents. A file must have at least one match for each expression to be included. Queries can optionally contain search filters to further refine the search results.
## Keyword search (default)
Keyword search matches search patterns exactly in file contents. Wrapping search patterns in `""` combines them as a single expression.
| Example | Explanation |
| :--- | :--- |
| `foo` | Match files containing the keyword `foo` |
| `foo bar` | Match files containing both `foo` **and** `bar` |
| `"foo bar"` | Match files containing the phrase `foo bar` |
| `"foo \"bar\""` | Match files containing `foo "bar"` exactly (escaped quotes) |
## Regex search
Toggle the regex button (`.*`) in the search bar to interpret search patterns as regular expressions.
| Example | Explanation | | Example | Explanation |
| :--- | :--- | | :--- | :--- |
| `foo` | Match files with regex `/foo/` | | `foo` | Match files with regex `/foo/` |
| `foo bar` | Match files with regex `/foo/` **and** `/bar/` | | `foo.*bar` | Match files with regex `/foo.*bar/` (foo followed by any characters, then bar) |
| `"foo bar"` | Match files with regex `/foo bar/` | | `^function\s+\w+` | Match files with regex `/^function\s+\w+/` (function at start of line, followed by whitespace and word characters) |
| `"foo bar"` | Match files with regex `/foo bar/`. Quotes are not matched. |
Multiple expressions can be or'd together with `or`, negated with `-`, or grouped with `()`. ## Search filters
| Example | Explanation | Search queries (keyword or regex) can include multiple search filters to further refine the search results. Some filters can be negated using the `-` prefix.
| :--- | :--- |
| `foo or bar` | Match files with regex `/foo/` **or** `/bar/` |
| `foo -bar` | Match files with regex `/foo/` but **not** `/bar/` |
| `foo (bar or baz)` | Match files with regex `/foo/` **and** either `/bar/` **or** `/baz/` |
Expressions can be prefixed with certain keywords to modify search behavior. Some keywords can be negated using the `-` prefix.
| Prefix | Description | Example | | Prefix | Description | Example |
| :--- | :--- | :--- | | :--- | :--- | :--- |
| `file:` | Filter results from filepaths that match the regex. By default all files are searched. | `file:README` - Filter results to filepaths that match regex `/README/`<br/>`file:"my file"` - Filter results to filepaths that match regex `/my file/`<br/>`-file:test\.ts$` - Ignore results from filepaths match regex `/test\.ts$/` | | `file:` | Filter results from filepaths that match the regex. By default all files are searched. | `file:README` - Filter results to filepaths that match regex `/README/`<br/>`file:"my file"` - Filter results to filepaths that match regex `/my file/`<br/>`-file:test\.ts$` - Ignore results from filepaths match regex `/test\.ts$/` |
| `repo:` | Filter results from repos that match the regex. By default all repos are searched. | `repo:linux` - Filter results to repos that match regex `/linux/`<br/>`-repo:^web/.*` - Ignore results from repos that match regex `/^web\/.*` | | `repo:` | Filter results from repos that match the regex. By default all repos are searched. | `repo:linux` - Filter results to repos that match regex `/linux/`<br/>`-repo:^web/.*` - Ignore results from repos that match regex `/^web\/.*/` |
| `rev:` | Filter results from a specific branch or tag. By default **only** the default branch is searched. | `rev:beta` - Filter results to branches that match regex `/beta/` | | `rev:` | Filter results from a specific branch or tag. By default **only** the default branch is searched. | `rev:beta` - Filter results to branches that match regex `/beta/` |
| `lang:` | Filter results by language (as defined by [linguist](https://github.com/github-linguist/linguist/blob/main/lib/linguist/languages.yml)). By default all languages are searched. | `lang:TypeScript` - Filter results to TypeScript files<br/>`-lang:YAML` - Ignore results from YAML files | | `lang:` | Filter results by language (as defined by [linguist](https://github.com/github-linguist/linguist/blob/main/lib/linguist/languages.yml)). By default all languages are searched. | `lang:TypeScript` - Filter results to TypeScript files<br/>`-lang:YAML` - Ignore results from YAML files |
| `sym:` | Match symbol definitions created by [universal ctags](https://ctags.io/) at index time. | `sym:\bmain\b` - Filter results to symbols that match regex `/\bmain\b/` | | `sym:` | Match symbol definitions created by [universal ctags](https://ctags.io/) at index time. | `sym:\bmain\b` - Filter results to symbols that match regex `/\bmain\b/` |
| `context:` | Filter results to a predefined [search context](/docs/features/search/search-contexts). | `context:web` - Filter results to the web context<br/>`-context:pipelines` - Ignore results from the pipelines context | | `context:` | Filter results to a predefined [search context](/docs/features/search/search-contexts). | `context:web` - Filter results to the web context<br/>`-context:pipelines` - Ignore results from the pipelines context |
## Boolean operators & grouping
By default, space-separated expressions are and'd together. Using the `or` keyword as well as parentheses `()` can be used to create more complex boolean logic. Parentheses can be negated using the `-` prefix.
| Example | Explanation |
| :--- | :--- |
| `foo or bar` | Match files containing `foo` **or** `bar` |
| `foo (bar or baz)` | Match files containing `foo` **and** either `bar` **or** `baz`. |
| `-(foo) bar` | Match files containing `bar` **and not** `foo`. |

View file

@ -9,7 +9,7 @@ title: "Overview"
- [MCP](/docs/features/mcp-server): Enrich agent context windows with code across your organization - [MCP](/docs/features/mcp-server): Enrich agent context windows with code across your organization
<CardGroup> <CardGroup>
<Card title="Deployment guide" icon="server" href="/docs/deployment-guide" horizontal="true"> <Card title="Deployment guides" icon="server" href="/docs/deployment/docker-compose" horizontal="true">
Learn how to self-host Sourcebot in a few simple steps. Learn how to self-host Sourcebot in a few simple steps.
</Card> </Card>
<Card title="Public demo" icon="globe" href="https://demo.sourcebot.dev/" horizontal="true"> <Card title="Public demo" icon="globe" href="https://demo.sourcebot.dev/" horizontal="true">
@ -162,7 +162,7 @@ Sourcebot is designed to be easily self-hosted, allowing you to deploy it onto y
--- ---
<CardGroup cols={2}> <CardGroup cols={2}>
<Card horizontal title="Deployment guide ->" href="/docs/deployment-guide" /> <Card horizontal title="Deployment guides ->" href="/docs/deployment/docker-compose" />
<Card horizontal title="Connecting your code ->" href="/docs/connections/overview" /> <Card horizontal title="Connecting your code ->" href="/docs/connections/overview" />
<Card horizontal title="Search syntax reference ->" href="/docs/features/search/syntax-reference" /> <Card horizontal title="Search syntax reference ->" href="/docs/features/search/syntax-reference" />
<Card horizontal title="Code navigation overview ->" href="/docs/features/code-navigation" /> <Card horizontal title="Code navigation overview ->" href="/docs/features/code-navigation" />

View file

@ -78,7 +78,7 @@ If your deployment is dependent on these features, please [reach out](https://gi
After updating your configuration file, restart your Sourcebot deployment to pick up the new changes. After updating your configuration file, restart your Sourcebot deployment to pick up the new changes.
</Step> </Step>
<Step title="You're done!"> <Step title="You're done!">
Congrats, you've successfully migrated to v3! Please let us know what you think of the new features by reaching out on our [discord](https://discord.gg/6Fhp27x7Pb) or on [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose). Congrats, you've successfully migrated to v3! Please let us know what you think of the new features by reaching out on our [discord](https://discord.gg/HDScTs3ptP) or on [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose).
</Step> </Step>
</Steps> </Steps>
@ -90,4 +90,4 @@ Some things to check:
- Make sure you have a name for each `connection`, and that the name only contains letters, digits, hyphens, or underscores - Make sure you have a name for each `connection`, and that the name only contains letters, digits, hyphens, or underscores
- Make sure each `connection` has a `type` field with a valid value (`gitlab`, `github`, `gitea`, `gerrit`) - Make sure each `connection` has a `type` field with a valid value (`gitlab`, `github`, `gitea`, `gerrit`)
Having troubles migrating from v2 to v3? Reach out to us on [discord](https://discord.gg/6Fhp27x7Pb) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose) and we'll try our best to help Having troubles migrating from v2 to v3? Reach out to us on [discord](https://discord.gg/HDScTs3ptP) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose) and we'll try our best to help

View file

@ -40,7 +40,7 @@ Please note that the following features are no longer supported in v4:
</Step> </Step>
<Step title="You're done!"> <Step title="You're done!">
Congrats, you've successfully migrated to v4! Please let us know what you think of the new features by reaching out on our [discord](https://discord.gg/6Fhp27x7Pb) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose) Congrats, you've successfully migrated to v4! Please let us know what you think of the new features by reaching out on our [discord](https://discord.gg/HDScTs3ptP) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose)
</Step> </Step>
</Steps> </Steps>
@ -58,4 +58,4 @@ to finish upgrading to v4 in single-tenant mode.
- If you're hitting issues with signing into your Sourcebot instance, make sure you're setting `AUTH_URL` correctly to your deployment domain (ex. `https://sourcebot.yourcompany.com`) - If you're hitting issues with signing into your Sourcebot instance, make sure you're setting `AUTH_URL` correctly to your deployment domain (ex. `https://sourcebot.yourcompany.com`)
Having troubles migrating from v3 to v4? Reach out to us on [discord](https://discord.gg/6Fhp27x7Pb) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose) and we'll try our best to help Having troubles migrating from v3 to v4? Reach out to us on [discord](https://discord.gg/HDScTs3ptP) or [GitHub](https://github.com/sourcebot-dev/sourcebot/issues/new/choose) and we'll try our best to help

View file

@ -647,6 +647,115 @@
"purpose", "purpose",
"audience" "audience"
] ]
},
"AuthentikIdentityProviderConfig": {
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
}, },
"oneOf": [ "oneOf": [
@ -1293,6 +1402,115 @@
"purpose", "purpose",
"audience" "audience"
] ]
},
{
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
] ]
} }

View file

@ -5163,6 +5163,115 @@
"purpose", "purpose",
"audience" "audience"
] ]
},
"AuthentikIdentityProviderConfig": {
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
}, },
"oneOf": [ "oneOf": [
@ -5809,6 +5918,115 @@
"purpose", "purpose",
"audience" "audience"
] ]
},
{
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
] ]
} }

View file

@ -5,6 +5,18 @@ set -e
# Disable auto-exporting of variables # Disable auto-exporting of variables
set +a set +a
# Detect if running as root
IS_ROOT=false
if [ "$(id -u)" -eq 0 ]; then
IS_ROOT=true
fi
if [ "$IS_ROOT" = "true" ]; then
echo -e "\e[34m[Info] Running as root user.\e[0m"
else
echo -e "\e[34m[Info] Running as non-root user.\e[0m"
fi
# If a CONFIG_PATH is set, resolve the environment overrides from the config file. # If a CONFIG_PATH is set, resolve the environment overrides from the config file.
# The overrides will be written into variables scoped to the current shell. This is # The overrides will be written into variables scoped to the current shell. This is
# required in case one of the variables used in this entrypoint is overridden (e.g., # required in case one of the variables used in this entrypoint is overridden (e.g.,
@ -54,12 +66,6 @@ fi
echo -e "\e[34m[Info] Sourcebot version: $NEXT_PUBLIC_SOURCEBOT_VERSION\e[0m" echo -e "\e[34m[Info] Sourcebot version: $NEXT_PUBLIC_SOURCEBOT_VERSION\e[0m"
# If we don't have a PostHog key, then we need to disable telemetry.
if [ -z "$NEXT_PUBLIC_POSTHOG_PAPIK" ]; then
echo -e "\e[33m[Warning] NEXT_PUBLIC_POSTHOG_PAPIK was not set. Setting SOURCEBOT_TELEMETRY_DISABLED.\e[0m"
export SOURCEBOT_TELEMETRY_DISABLED=true
fi
if [ -n "$SOURCEBOT_TELEMETRY_DISABLED" ]; then if [ -n "$SOURCEBOT_TELEMETRY_DISABLED" ]; then
# Validate that SOURCEBOT_TELEMETRY_DISABLED is either "true" or "false" # Validate that SOURCEBOT_TELEMETRY_DISABLED is either "true" or "false"
if [ "$SOURCEBOT_TELEMETRY_DISABLED" != "true" ] && [ "$SOURCEBOT_TELEMETRY_DISABLED" != "false" ]; then if [ "$SOURCEBOT_TELEMETRY_DISABLED" != "true" ] && [ "$SOURCEBOT_TELEMETRY_DISABLED" != "false" ]; then
@ -83,8 +89,13 @@ fi
# Check if DATABASE_DATA_DIR exists, if not initialize it # Check if DATABASE_DATA_DIR exists, if not initialize it
if [ "$DATABASE_EMBEDDED" = "true" ] && [ ! -d "$DATABASE_DATA_DIR" ]; then if [ "$DATABASE_EMBEDDED" = "true" ] && [ ! -d "$DATABASE_DATA_DIR" ]; then
echo -e "\e[34m[Info] Initializing database at $DATABASE_DATA_DIR...\e[0m" echo -e "\e[34m[Info] Initializing database at $DATABASE_DATA_DIR...\e[0m"
mkdir -p $DATABASE_DATA_DIR && chown -R postgres:postgres "$DATABASE_DATA_DIR" mkdir -p $DATABASE_DATA_DIR
su postgres -c "initdb -D $DATABASE_DATA_DIR" if [ "$IS_ROOT" = "true" ]; then
chown -R postgres:postgres "$DATABASE_DATA_DIR"
su postgres -c "initdb -D $DATABASE_DATA_DIR"
else
initdb -D "$DATABASE_DATA_DIR" -U postgres
fi
fi fi
# Create the redis data directory if it doesn't exist # Create the redis data directory if it doesn't exist
@ -142,7 +153,7 @@ if [ ! -f "$FIRST_RUN_FILE" ]; then
# (if telemetry is enabled) # (if telemetry is enabled)
if [ "$SOURCEBOT_TELEMETRY_DISABLED" = "false" ]; then if [ "$SOURCEBOT_TELEMETRY_DISABLED" = "false" ]; then
if ! ( curl -L --output /dev/null --silent --fail --header "Content-Type: application/json" -d '{ if ! ( curl -L --output /dev/null --silent --fail --header "Content-Type: application/json" -d '{
"api_key": "'"$NEXT_PUBLIC_POSTHOG_PAPIK"'", "api_key": "'"$POSTHOG_PAPIK"'",
"event": "install", "event": "install",
"distinct_id": "'"$SOURCEBOT_INSTALL_ID"'", "distinct_id": "'"$SOURCEBOT_INSTALL_ID"'",
"properties": { "properties": {
@ -162,7 +173,7 @@ else
if [ "$SOURCEBOT_TELEMETRY_DISABLED" = "false" ]; then if [ "$SOURCEBOT_TELEMETRY_DISABLED" = "false" ]; then
if ! ( curl -L --output /dev/null --silent --fail --header "Content-Type: application/json" -d '{ if ! ( curl -L --output /dev/null --silent --fail --header "Content-Type: application/json" -d '{
"api_key": "'"$NEXT_PUBLIC_POSTHOG_PAPIK"'", "api_key": "'"$POSTHOG_PAPIK"'",
"event": "upgrade", "event": "upgrade",
"distinct_id": "'"$SOURCEBOT_INSTALL_ID"'", "distinct_id": "'"$SOURCEBOT_INSTALL_ID"'",
"properties": { "properties": {
@ -180,13 +191,31 @@ echo "{\"version\": \"$NEXT_PUBLIC_SOURCEBOT_VERSION\", \"install_id\": \"$SOURC
# Start the database and wait for it to be ready before starting any other service # Start the database and wait for it to be ready before starting any other service
if [ "$DATABASE_EMBEDDED" = "true" ]; then if [ "$DATABASE_EMBEDDED" = "true" ]; then
su postgres -c "postgres -D $DATABASE_DATA_DIR" & if [ "$IS_ROOT" = "true" ]; then
su postgres -c "postgres -D $DATABASE_DATA_DIR" &
else
postgres -D "$DATABASE_DATA_DIR" &
fi
until pg_isready -h localhost -p 5432 -U postgres; do until pg_isready -h localhost -p 5432 -U postgres; do
echo -e "\e[34m[Info] Waiting for the database to be ready...\e[0m" echo -e "\e[34m[Info] Waiting for the database to be ready...\e[0m"
sleep 1 sleep 1
# As postgres runs in the background, we must check if it is still
# running, otherwise the "until" loop will be running indefinitely.
if ! pgrep -x "postgres" > /dev/null; then
echo "postgres failed to run"
exit 1
fi
done done
# Check if the database already exists, and create it if it dne if [ "$IS_ROOT" = "false" ]; then
# Running as non-root we need to ensure the postgres account is created.
psql -U postgres -tc "SELECT 1 FROM pg_roles WHERE rolname='postgres'" | grep -q 1 \
|| createuser postgres -s
fi
# Check if the database already exists, and create it if it doesn't exist
EXISTING_DB=$(psql -U postgres -tAc "SELECT 1 FROM pg_database WHERE datname = 'sourcebot'") EXISTING_DB=$(psql -U postgres -tAc "SELECT 1 FROM pg_database WHERE datname = 'sourcebot'")
if [ "$EXISTING_DB" = "1" ]; then if [ "$EXISTING_DB" = "1" ]; then
@ -201,7 +230,7 @@ fi
echo -e "\e[34m[Info] Running database migration...\e[0m" echo -e "\e[34m[Info] Running database migration...\e[0m"
DATABASE_URL="$DATABASE_URL" yarn workspace @sourcebot/db prisma:migrate:prod DATABASE_URL="$DATABASE_URL" yarn workspace @sourcebot/db prisma:migrate:prod
# Create the log directory # Create the log directory if it doesn't exist
mkdir -p /var/log/sourcebot mkdir -p /var/log/sourcebot
# Run supervisord # Run supervisord

View file

@ -18,7 +18,7 @@
"dev:prisma:studio": "yarn with-env yarn workspace @sourcebot/db prisma:studio", "dev:prisma:studio": "yarn with-env yarn workspace @sourcebot/db prisma:studio",
"dev:prisma:migrate:reset": "yarn with-env yarn workspace @sourcebot/db prisma:migrate:reset", "dev:prisma:migrate:reset": "yarn with-env yarn workspace @sourcebot/db prisma:migrate:reset",
"dev:prisma:db:push": "yarn with-env yarn workspace @sourcebot/db prisma:db:push", "dev:prisma:db:push": "yarn with-env yarn workspace @sourcebot/db prisma:db:push",
"build:deps": "yarn workspaces foreach --recursive --topological --from '{@sourcebot/schemas,@sourcebot/db,@sourcebot/shared}' run build" "build:deps": "yarn workspaces foreach --recursive --topological --from '{@sourcebot/schemas,@sourcebot/db,@sourcebot/shared,@sourcebot/query-language}' run build"
}, },
"devDependencies": { "devDependencies": {
"concurrently": "^9.2.1", "concurrently": "^9.2.1",
@ -27,6 +27,7 @@
}, },
"packageManager": "yarn@4.7.0", "packageManager": "yarn@4.7.0",
"resolutions": { "resolutions": {
"prettier": "3.5.3" "prettier": "3.5.3",
"@lezer/common": "1.3.0"
} }
} }

View file

@ -40,6 +40,8 @@
"cross-fetch": "^4.0.0", "cross-fetch": "^4.0.0",
"dotenv": "^16.4.5", "dotenv": "^16.4.5",
"express": "^4.21.2", "express": "^4.21.2",
"express-async-errors": "^3.1.1",
"fast-deep-equal": "^3.1.3",
"git-url-parse": "^16.1.0", "git-url-parse": "^16.1.0",
"gitea-js": "^1.22.0", "gitea-js": "^1.22.0",
"glob": "^11.0.0", "glob": "^11.0.0",

103
packages/backend/src/api.ts Normal file
View file

@ -0,0 +1,103 @@
import { PrismaClient, RepoIndexingJobType } from '@sourcebot/db';
import { createLogger } from '@sourcebot/shared';
import express, { Request, Response } from 'express';
import 'express-async-errors';
import * as http from "http";
import z from 'zod';
import { ConnectionManager } from './connectionManager.js';
import { PromClient } from './promClient.js';
import { RepoIndexManager } from './repoIndexManager.js';
// Module-scoped logger tagged with the 'api' component name.
const logger = createLogger('api');

// TCP port the backend HTTP API listens on.
const PORT = 3060;
/**
 * Minimal HTTP API for the backend worker process.
 *
 * Routes exposed:
 * - `/metrics` — Prometheus metrics scrape endpoint.
 * - `POST /api/sync-connection` — enqueue a sync job for an existing connection.
 * - `POST /api/index-repo` — enqueue an index job for an existing repo.
 *
 * The underlying server starts listening immediately on construction and is
 * shut down via {@link Api.dispose}.
 */
export class Api {
    private server: http.Server;

    constructor(
        promClient: PromClient,
        private prisma: PrismaClient,
        private connectionManager: ConnectionManager,
        private repoIndexManager: RepoIndexManager,
    ) {
        const app = express();
        app.use(express.json());
        app.use(express.urlencoded({ extended: true }));

        // Prometheus metrics endpoint.
        // NOTE(review): registered with `use`, so it answers every HTTP method
        // under the `/metrics` prefix (not just GET /metrics) — confirm intended.
        app.use('/metrics', async (_req: Request, res: Response) => {
            res.set('Content-Type', promClient.registry.contentType);
            res.end(await promClient.registry.metrics());
        });

        app.post('/api/sync-connection', this.syncConnection.bind(this));
        app.post('/api/index-repo', this.indexRepo.bind(this));

        this.server = app.listen(PORT, () => {
            logger.info(`API server is running on port ${PORT}`);
        });
    }

    /**
     * Handles `POST /api/sync-connection`.
     *
     * Expects a body of exactly `{ connectionId: number }`. Responds 400 on a
     * malformed body, 404 when no such connection exists, otherwise 200 with
     * the id of the sync job that was enqueued.
     */
    private async syncConnection(req: Request, res: Response) {
        const bodySchema = z.object({
            connectionId: z.number(),
        }).strict();

        const parseResult = bodySchema.safeParse(req.body);
        if (!parseResult.success) {
            res.status(400).json({ error: parseResult.error.message });
            return;
        }

        const connection = await this.prisma.connection.findUnique({
            where: {
                id: parseResult.data.connectionId,
            }
        });
        if (!connection) {
            res.status(404).json({ error: 'Connection not found' });
            return;
        }

        const [jobId] = await this.connectionManager.createJobs([connection]);
        res.status(200).json({ jobId });
    }

    /**
     * Handles `POST /api/index-repo`.
     *
     * Expects a body of exactly `{ repoId: number }`. Responds 400 on a
     * malformed body, 404 when no such repo exists, otherwise 200 with the id
     * of the index job that was enqueued.
     */
    private async indexRepo(req: Request, res: Response) {
        const bodySchema = z.object({
            repoId: z.number(),
        }).strict();

        const parseResult = bodySchema.safeParse(req.body);
        if (!parseResult.success) {
            res.status(400).json({ error: parseResult.error.message });
            return;
        }

        const repo = await this.prisma.repo.findUnique({
            where: { id: parseResult.data.repoId },
        });
        if (!repo) {
            res.status(404).json({ error: 'Repo not found' });
            return;
        }

        const [jobId] = await this.repoIndexManager.createJobs([repo], RepoIndexingJobType.INDEX);
        res.status(200).json({ jobId });
    }

    /**
     * Stops accepting new requests and resolves once the server has fully
     * closed; rejects if closing fails (e.g. the server was never listening).
     */
    public async dispose() {
        return new Promise<void>((resolve, reject) => {
            this.server.close((err) => (err ? reject(err) : resolve()));
        });
    }
}

View file

@ -5,6 +5,7 @@ import type { ClientOptions, ClientPathsWithMethod } from "openapi-fetch";
import { createLogger } from "@sourcebot/shared"; import { createLogger } from "@sourcebot/shared";
import { measure, fetchWithRetry } from "./utils.js"; import { measure, fetchWithRetry } from "./utils.js";
import * as Sentry from "@sentry/node"; import * as Sentry from "@sentry/node";
import micromatch from "micromatch";
import { import {
SchemaRepository as CloudRepository, SchemaRepository as CloudRepository,
} from "@coderabbitai/bitbucket/cloud/openapi"; } from "@coderabbitai/bitbucket/cloud/openapi";
@ -346,10 +347,15 @@ async function cloudGetRepos(client: BitbucketClient, repoList: string[]): Promi
function cloudShouldExcludeRepo(repo: BitbucketRepository, config: BitbucketConnectionConfig): boolean { function cloudShouldExcludeRepo(repo: BitbucketRepository, config: BitbucketConnectionConfig): boolean {
const cloudRepo = repo as CloudRepository; const cloudRepo = repo as CloudRepository;
let reason = '';
const repoName = cloudRepo.full_name!;
const shouldExclude = (() => { const shouldExclude = (() => {
if (config.exclude?.repos && config.exclude.repos.includes(cloudRepo.full_name!)) { if (config.exclude?.repos) {
return true; if (micromatch.isMatch(repoName, config.exclude.repos)) {
reason = `\`exclude.repos\` contains ${repoName}`;
return true;
}
} }
if (!!config.exclude?.archived) { if (!!config.exclude?.archived) {
@ -357,12 +363,15 @@ function cloudShouldExcludeRepo(repo: BitbucketRepository, config: BitbucketConn
} }
if (!!config.exclude?.forks && cloudRepo.parent !== undefined) { if (!!config.exclude?.forks && cloudRepo.parent !== undefined) {
reason = `\`exclude.forks\` is true`;
return true; return true;
} }
return false;
})(); })();
if (shouldExclude) { if (shouldExclude) {
logger.debug(`Excluding repo ${cloudRepo.full_name} because it matches the exclude pattern`); logger.debug(`Excluding repo ${repoName}. Reason: ${reason}`);
return true; return true;
} }
return false; return false;
@ -548,23 +557,32 @@ function serverShouldExcludeRepo(repo: BitbucketRepository, config: BitbucketCon
const projectName = serverRepo.project!.key; const projectName = serverRepo.project!.key;
const repoSlug = serverRepo.slug!; const repoSlug = serverRepo.slug!;
const repoName = `${projectName}/${repoSlug}`;
let reason = '';
const shouldExclude = (() => { const shouldExclude = (() => {
if (config.exclude?.repos && config.exclude.repos.includes(`${projectName}/${repoSlug}`)) { if (config.exclude?.repos) {
return true; if (micromatch.isMatch(repoName, config.exclude.repos)) {
reason = `\`exclude.repos\` contains ${repoName}`;
return true;
}
} }
if (!!config.exclude?.archived && serverRepo.archived) { if (!!config.exclude?.archived && serverRepo.archived) {
reason = `\`exclude.archived\` is true`;
return true; return true;
} }
if (!!config.exclude?.forks && serverRepo.origin !== undefined) { if (!!config.exclude?.forks && serverRepo.origin !== undefined) {
reason = `\`exclude.forks\` is true`;
return true; return true;
} }
return false;
})(); })();
if (shouldExclude) { if (shouldExclude) {
logger.debug(`Excluding repo ${projectName}/${repoSlug} because it matches the exclude pattern`); logger.debug(`Excluding repo ${repoName}. Reason: ${reason}`);
return true; return true;
} }
return false; return false;

View file

@ -6,6 +6,7 @@ import chokidar, { FSWatcher } from 'chokidar';
import { ConnectionManager } from "./connectionManager.js"; import { ConnectionManager } from "./connectionManager.js";
import { SINGLE_TENANT_ORG_ID } from "./constants.js"; import { SINGLE_TENANT_ORG_ID } from "./constants.js";
import { syncSearchContexts } from "./ee/syncSearchContexts.js"; import { syncSearchContexts } from "./ee/syncSearchContexts.js";
import isEqual from 'fast-deep-equal';
const logger = createLogger('config-manager'); const logger = createLogger('config-manager');
@ -64,8 +65,8 @@ export class ConfigManager {
const existingConnectionConfig = existingConnection ? existingConnection.config as unknown as ConnectionConfig : undefined; const existingConnectionConfig = existingConnection ? existingConnection.config as unknown as ConnectionConfig : undefined;
const connectionNeedsSyncing = const connectionNeedsSyncing =
!existingConnection || !existingConnectionConfig ||
(JSON.stringify(existingConnectionConfig) !== JSON.stringify(newConnectionConfig)); !isEqual(existingConnectionConfig, newConnectionConfig);
// Either update the existing connection or create a new one. // Either update the existing connection or create a new one.
const connection = existingConnection ? const connection = existingConnection ?
@ -93,8 +94,8 @@ export class ConfigManager {
}); });
if (connectionNeedsSyncing) { if (connectionNeedsSyncing) {
const [jobId] = await this.connectionManager.createJobs([connection]); logger.info(`Change detected for connection '${key}' (id: ${connection.id}). Creating sync job.`);
logger.info(`Change detected for connection '${key}' (id: ${connection.id}). Created sync job ${jobId}.`); await this.connectionManager.createJobs([connection]);
} }
} }
} }

View file

@ -7,14 +7,16 @@ import { Job, Queue, ReservedJob, Worker } from "groupmq";
import { Redis } from 'ioredis'; import { Redis } from 'ioredis';
import { compileAzureDevOpsConfig, compileBitbucketConfig, compileGenericGitHostConfig, compileGerritConfig, compileGiteaConfig, compileGithubConfig, compileGitlabConfig } from "./repoCompileUtils.js"; import { compileAzureDevOpsConfig, compileBitbucketConfig, compileGenericGitHostConfig, compileGerritConfig, compileGiteaConfig, compileGithubConfig, compileGitlabConfig } from "./repoCompileUtils.js";
import { Settings } from "./types.js"; import { Settings } from "./types.js";
import { groupmqLifecycleExceptionWrapper } from "./utils.js"; import { groupmqLifecycleExceptionWrapper, setIntervalAsync } from "./utils.js";
import { syncSearchContexts } from "./ee/syncSearchContexts.js"; import { syncSearchContexts } from "./ee/syncSearchContexts.js";
import { captureEvent } from "./posthog.js"; import { captureEvent } from "./posthog.js";
import { PromClient } from "./promClient.js"; import { PromClient } from "./promClient.js";
import { GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS } from "./constants.js";
const LOG_TAG = 'connection-manager'; const LOG_TAG = 'connection-manager';
const logger = createLogger(LOG_TAG); const logger = createLogger(LOG_TAG);
const createJobLogger = (jobId: string) => createLogger(`${LOG_TAG}:job:${jobId}`); const createJobLogger = (jobId: string) => createLogger(`${LOG_TAG}:job:${jobId}`);
const QUEUE_NAME = 'connection-sync-queue';
type JobPayload = { type JobPayload = {
jobId: string, jobId: string,
@ -30,19 +32,19 @@ type JobResult = {
const JOB_TIMEOUT_MS = 1000 * 60 * 60 * 2; // 2 hour timeout const JOB_TIMEOUT_MS = 1000 * 60 * 60 * 2; // 2 hour timeout
export class ConnectionManager { export class ConnectionManager {
private worker: Worker; private worker: Worker<JobPayload>;
private queue: Queue<JobPayload>; private queue: Queue<JobPayload>;
private interval?: NodeJS.Timeout; private interval?: NodeJS.Timeout;
constructor( constructor(
private db: PrismaClient, private db: PrismaClient,
private settings: Settings, private settings: Settings,
redis: Redis, private redis: Redis,
private promClient: PromClient, private promClient: PromClient,
) { ) {
this.queue = new Queue<JobPayload>({ this.queue = new Queue<JobPayload>({
redis, redis,
namespace: 'connection-sync-queue', namespace: QUEUE_NAME,
jobTimeoutMs: JOB_TIMEOUT_MS, jobTimeoutMs: JOB_TIMEOUT_MS,
maxAttempts: 3, maxAttempts: 3,
logger: env.DEBUG_ENABLE_GROUPMQ_LOGGING === 'true', logger: env.DEBUG_ENABLE_GROUPMQ_LOGGING === 'true',
@ -62,11 +64,15 @@ export class ConnectionManager {
this.worker.on('failed', this.onJobFailed.bind(this)); this.worker.on('failed', this.onJobFailed.bind(this));
this.worker.on('stalled', this.onJobStalled.bind(this)); this.worker.on('stalled', this.onJobStalled.bind(this));
this.worker.on('error', this.onWorkerError.bind(this)); this.worker.on('error', this.onWorkerError.bind(this));
// graceful-timeout is triggered when a job is still processing after
// worker.close() is called and the timeout period has elapsed. In this case,
// we fail the job with no retry.
this.worker.on('graceful-timeout', this.onJobGracefulTimeout.bind(this));
} }
public startScheduler() { public startScheduler() {
logger.debug('Starting scheduler'); logger.debug('Starting scheduler');
this.interval = setInterval(async () => { this.interval = setIntervalAsync(async () => {
const thresholdDate = new Date(Date.now() - this.settings.resyncConnectionIntervalMs); const thresholdDate = new Date(Date.now() - this.settings.resyncConnectionIntervalMs);
const timeoutDate = new Date(Date.now() - JOB_TIMEOUT_MS); const timeoutDate = new Date(Date.now() - JOB_TIMEOUT_MS);
@ -128,6 +134,7 @@ export class ConnectionManager {
}); });
for (const job of jobs) { for (const job of jobs) {
logger.info(`Scheduling job ${job.id} for connection ${job.connection.name} (id: ${job.connectionId})`);
await this.queue.add({ await this.queue.add({
groupId: `connection:${job.connectionId}`, groupId: `connection:${job.connectionId}`,
data: { data: {
@ -150,6 +157,22 @@ export class ConnectionManager {
const logger = createJobLogger(jobId); const logger = createJobLogger(jobId);
logger.info(`Running connection sync job ${jobId} for connection ${connectionName} (id: ${job.data.connectionId}) (attempt ${job.attempts + 1} / ${job.maxAttempts})`); logger.info(`Running connection sync job ${jobId} for connection ${connectionName} (id: ${job.data.connectionId}) (attempt ${job.attempts + 1} / ${job.maxAttempts})`);
const currentStatus = await this.db.connectionSyncJob.findUniqueOrThrow({
where: {
id: jobId,
},
select: {
status: true,
}
});
// Fail safe: if the job is not PENDING (first run) or IN_PROGRESS (retry), it indicates the job
// is in an invalid state and should be skipped.
if (currentStatus.status !== ConnectionSyncJobStatus.PENDING && currentStatus.status !== ConnectionSyncJobStatus.IN_PROGRESS) {
throw new Error(`Job ${jobId} is not in a valid state. Expected: ${ConnectionSyncJobStatus.PENDING} or ${ConnectionSyncJobStatus.IN_PROGRESS}. Actual: ${currentStatus.status}. Skipping.`);
}
this.promClient.pendingConnectionSyncJobs.dec({ connection: connectionName }); this.promClient.pendingConnectionSyncJobs.dec({ connection: connectionName });
this.promClient.activeConnectionSyncJobs.inc({ connection: connectionName }); this.promClient.activeConnectionSyncJobs.inc({ connection: connectionName });
@ -178,7 +201,7 @@ export class ConnectionManager {
const result = await (async () => { const result = await (async () => {
switch (config.type) { switch (config.type) {
case 'github': { case 'github': {
return await compileGithubConfig(config, job.data.connectionId, abortController); return await compileGithubConfig(config, job.data.connectionId, abortController.signal);
} }
case 'gitlab': { case 'gitlab': {
return await compileGitlabConfig(config, job.data.connectionId); return await compileGitlabConfig(config, job.data.connectionId);
@ -383,6 +406,33 @@ export class ConnectionManager {
}); });
}); });
private onJobGracefulTimeout = async (job: Job<JobPayload>) =>
groupmqLifecycleExceptionWrapper('onJobGracefulTimeout', logger, async () => {
const logger = createJobLogger(job.id);
const { connection } = await this.db.connectionSyncJob.update({
where: { id: job.id },
data: {
status: ConnectionSyncJobStatus.FAILED,
completedAt: new Date(),
errorMessage: 'Job timed out',
},
select: {
connection: true,
}
});
this.promClient.activeConnectionSyncJobs.dec({ connection: connection.name });
this.promClient.connectionSyncJobFailTotal.inc({ connection: connection.name });
logger.error(`Job ${job.id} timed out for connection ${connection.name} (id: ${connection.id})`);
captureEvent('backend_connection_sync_job_failed', {
connectionId: connection.id,
error: 'Job timed out',
});
});
private async onWorkerError(error: Error) { private async onWorkerError(error: Error) {
Sentry.captureException(error); Sentry.captureException(error);
logger.error(`Connection syncer worker error.`, error); logger.error(`Connection syncer worker error.`, error);
@ -392,8 +442,28 @@ export class ConnectionManager {
if (this.interval) { if (this.interval) {
clearInterval(this.interval); clearInterval(this.interval);
} }
await this.worker.close();
await this.queue.close(); const inProgressJobs = this.worker.getCurrentJobs();
await this.worker.close(GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS);
// Manually release group locks for in progress jobs to prevent deadlocks.
// @see: https://github.com/Openpanel-dev/groupmq/issues/8
for (const { job } of inProgressJobs) {
const lockKey = `groupmq:${QUEUE_NAME}:lock:${job.groupId}`;
logger.debug(`Releasing group lock ${lockKey} for in progress job ${job.id}`);
try {
await this.redis.del(lockKey);
} catch (error) {
Sentry.captureException(error);
logger.error(`Failed to release group lock ${lockKey} for in progress job ${job.id}. Error: `, error);
}
}
// @note: As of groupmq v1.0.0, queue.close() will just close the underlying
// redis connection. Since we share the same redis client between, skip this
// step and close the redis client directly in index.ts.
// @see: https://github.com/Openpanel-dev/groupmq/blob/main/src/queue.ts#L1900
// await this.queue.close();
} }
} }

View file

@ -11,3 +11,23 @@ export const PERMISSION_SYNC_SUPPORTED_CODE_HOST_TYPES: CodeHostType[] = [
export const REPOS_CACHE_DIR = path.join(env.DATA_CACHE_DIR, 'repos'); export const REPOS_CACHE_DIR = path.join(env.DATA_CACHE_DIR, 'repos');
export const INDEX_CACHE_DIR = path.join(env.DATA_CACHE_DIR, 'index'); export const INDEX_CACHE_DIR = path.join(env.DATA_CACHE_DIR, 'index');
// Maximum time to wait for current job to finish
export const GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS = 5 * 1000; // 5 seconds
// List of shutdown signals
export const SHUTDOWN_SIGNALS: string[] = [
'SIGHUP',
'SIGINT',
'SIGQUIT',
'SIGILL',
'SIGTRAP',
'SIGABRT',
'SIGBUS',
'SIGFPE',
'SIGSEGV',
'SIGUSR2',
'SIGTERM',
// @note: SIGKILL and SIGSTOP cannot have listeners installed.
// @see: https://nodejs.org/api/process.html#signal-events
];

View file

@ -7,6 +7,7 @@ import { PERMISSION_SYNC_SUPPORTED_CODE_HOST_TYPES } from "../constants.js";
import { createOctokitFromToken, getReposForAuthenticatedUser } from "../github.js"; import { createOctokitFromToken, getReposForAuthenticatedUser } from "../github.js";
import { createGitLabFromOAuthToken, getProjectsForAuthenticatedUser } from "../gitlab.js"; import { createGitLabFromOAuthToken, getProjectsForAuthenticatedUser } from "../gitlab.js";
import { Settings } from "../types.js"; import { Settings } from "../types.js";
import { setIntervalAsync } from "../utils.js";
const LOG_TAG = 'user-permission-syncer'; const LOG_TAG = 'user-permission-syncer';
const logger = createLogger(LOG_TAG); const logger = createLogger(LOG_TAG);
@ -46,7 +47,7 @@ export class AccountPermissionSyncer {
logger.debug('Starting scheduler'); logger.debug('Starting scheduler');
this.interval = setInterval(async () => { this.interval = setIntervalAsync(async () => {
const thresholdDate = new Date(Date.now() - this.settings.experiment_userDrivenPermissionSyncIntervalMs); const thresholdDate = new Date(Date.now() - this.settings.experiment_userDrivenPermissionSyncIntervalMs);
const accounts = await this.db.account.findMany({ const accounts = await this.db.account.findMany({
@ -101,7 +102,7 @@ export class AccountPermissionSyncer {
if (this.interval) { if (this.interval) {
clearInterval(this.interval); clearInterval(this.interval);
} }
await this.worker.close(); await this.worker.close(/* force = */ true);
await this.queue.close(); await this.queue.close();
} }

View file

@ -8,7 +8,7 @@ import { PERMISSION_SYNC_SUPPORTED_CODE_HOST_TYPES } from "../constants.js";
import { createOctokitFromToken, getRepoCollaborators, GITHUB_CLOUD_HOSTNAME } from "../github.js"; import { createOctokitFromToken, getRepoCollaborators, GITHUB_CLOUD_HOSTNAME } from "../github.js";
import { createGitLabFromPersonalAccessToken, getProjectMembers } from "../gitlab.js"; import { createGitLabFromPersonalAccessToken, getProjectMembers } from "../gitlab.js";
import { Settings } from "../types.js"; import { Settings } from "../types.js";
import { getAuthCredentialsForRepo } from "../utils.js"; import { getAuthCredentialsForRepo, setIntervalAsync } from "../utils.js";
type RepoPermissionSyncJob = { type RepoPermissionSyncJob = {
jobId: string; jobId: string;
@ -48,26 +48,34 @@ export class RepoPermissionSyncer {
logger.debug('Starting scheduler'); logger.debug('Starting scheduler');
this.interval = setInterval(async () => { this.interval = setIntervalAsync(async () => {
// @todo: make this configurable // @todo: make this configurable
const thresholdDate = new Date(Date.now() - this.settings.experiment_repoDrivenPermissionSyncIntervalMs); const thresholdDate = new Date(Date.now() - this.settings.experiment_repoDrivenPermissionSyncIntervalMs);
const repos = await this.db.repo.findMany({ const repos = await this.db.repo.findMany({
// Repos need their permissions to be synced against the code host when... // Repos need their permissions to be synced against the code host when...
where: { where: {
// They belong to a code host that supports permissions syncing
AND: [ AND: [
// They are not public. Public repositories are always visible to all users, therefore we don't
// need to explicitly perform permission syncing for them.
// @see: packages/web/src/prisma.ts
{
isPublic: false
},
// They belong to a code host that supports permissions syncing
{ {
external_codeHostType: { external_codeHostType: {
in: PERMISSION_SYNC_SUPPORTED_CODE_HOST_TYPES, in: PERMISSION_SYNC_SUPPORTED_CODE_HOST_TYPES,
} }
}, },
// They have not been synced within the threshold date.
{ {
OR: [ OR: [
{ permissionSyncedAt: null }, { permissionSyncedAt: null },
{ permissionSyncedAt: { lt: thresholdDate } }, { permissionSyncedAt: { lt: thresholdDate } },
], ],
}, },
// There aren't any active or recently failed jobs.
{ {
NOT: { NOT: {
permissionSyncJobs: { permissionSyncJobs: {
@ -106,7 +114,7 @@ export class RepoPermissionSyncer {
if (this.interval) { if (this.interval) {
clearInterval(this.interval); clearInterval(this.interval);
} }
await this.worker.close(); await this.worker.close(/* force = */ true);
await this.queue.close(); await this.queue.close();
} }

View file

@ -1,17 +1,20 @@
import "./instrument.js"; import "./instrument.js";
import * as Sentry from "@sentry/node";
import { PrismaClient } from "@sourcebot/db"; import { PrismaClient } from "@sourcebot/db";
import { createLogger } from "@sourcebot/shared"; import { createLogger, env, getConfigSettings, getDBConnectionString, hasEntitlement } from "@sourcebot/shared";
import { env, getConfigSettings, hasEntitlement, getDBConnectionString } from '@sourcebot/shared'; import 'express-async-errors';
import { existsSync } from 'fs'; import { existsSync } from 'fs';
import { mkdir } from 'fs/promises'; import { mkdir } from 'fs/promises';
import { Redis } from 'ioredis'; import { Redis } from 'ioredis';
import { Api } from "./api.js";
import { ConfigManager } from "./configManager.js"; import { ConfigManager } from "./configManager.js";
import { ConnectionManager } from './connectionManager.js'; import { ConnectionManager } from './connectionManager.js';
import { INDEX_CACHE_DIR, REPOS_CACHE_DIR } from './constants.js'; import { INDEX_CACHE_DIR, REPOS_CACHE_DIR, SHUTDOWN_SIGNALS } from './constants.js';
import { AccountPermissionSyncer } from "./ee/accountPermissionSyncer.js";
import { GithubAppManager } from "./ee/githubAppManager.js"; import { GithubAppManager } from "./ee/githubAppManager.js";
import { RepoPermissionSyncer } from './ee/repoPermissionSyncer.js'; import { RepoPermissionSyncer } from './ee/repoPermissionSyncer.js';
import { AccountPermissionSyncer } from "./ee/accountPermissionSyncer.js"; import { shutdownPosthog } from "./posthog.js";
import { PromClient } from './promClient.js'; import { PromClient } from './promClient.js';
import { RepoIndexManager } from "./repoIndexManager.js"; import { RepoIndexManager } from "./repoIndexManager.js";
@ -39,13 +42,14 @@ const prisma = new PrismaClient({
const redis = new Redis(env.REDIS_URL, { const redis = new Redis(env.REDIS_URL, {
maxRetriesPerRequest: null maxRetriesPerRequest: null
}); });
redis.ping().then(() => {
try {
await redis.ping();
logger.info('Connected to redis'); logger.info('Connected to redis');
}).catch((err: unknown) => { } catch (err: unknown) {
logger.error('Failed to connect to redis'); logger.error('Failed to connect to redis. Error:', err);
logger.error(err);
process.exit(1); process.exit(1);
}); }
const promClient = new PromClient(); const promClient = new PromClient();
@ -73,46 +77,74 @@ else if (env.EXPERIMENT_EE_PERMISSION_SYNC_ENABLED === 'true' && hasEntitlement(
accountPermissionSyncer.startScheduler(); accountPermissionSyncer.startScheduler();
} }
const api = new Api(
promClient,
prisma,
connectionManager,
repoIndexManager,
);
logger.info('Worker started.'); logger.info('Worker started.');
const cleanup = async (signal: string) => { const listenToShutdownSignals = () => {
logger.info(`Received ${signal}, cleaning up...`); const signals = SHUTDOWN_SIGNALS;
const shutdownTimeout = 30000; // 30 seconds let receivedSignal = false;
try { const cleanup = async (signal: string) => {
await Promise.race([ try {
Promise.all([ if (receivedSignal) {
repoIndexManager.dispose(), return;
connectionManager.dispose(), }
repoPermissionSyncer.dispose(), receivedSignal = true;
accountPermissionSyncer.dispose(),
promClient.dispose(), logger.info(`Received ${signal}, cleaning up...`);
configManager.dispose(),
]), await repoIndexManager.dispose()
new Promise((_, reject) => await connectionManager.dispose()
setTimeout(() => reject(new Error('Shutdown timeout')), shutdownTimeout) await repoPermissionSyncer.dispose()
) await accountPermissionSyncer.dispose()
]); await configManager.dispose()
logger.info('All workers shut down gracefully');
} catch (error) { await prisma.$disconnect();
logger.warn('Shutdown timeout or error, forcing exit:', error instanceof Error ? error.message : String(error)); await redis.quit();
await api.dispose();
await shutdownPosthog();
logger.info('All workers shut down gracefully');
signals.forEach(sig => process.removeListener(sig, cleanup));
return 0;
} catch (error) {
Sentry.captureException(error);
logger.error('Error shutting down worker:', error);
return 1;
}
} }
await prisma.$disconnect(); signals.forEach(signal => {
await redis.quit(); process.on(signal, (err) => {
cleanup(err).then(code => {
process.exit(code);
});
});
});
// Register handlers for uncaught exceptions and unhandled rejections
process.on('uncaughtException', (err) => {
logger.error(`Uncaught exception: ${err.message}`);
cleanup('uncaughtException').then(() => {
process.exit(1);
});
});
process.on('unhandledRejection', (reason, promise) => {
logger.error(`Unhandled rejection at: ${promise}, reason: ${reason}`);
cleanup('unhandledRejection').then(() => {
process.exit(1);
});
});
} }
process.on('SIGINT', () => cleanup('SIGINT').finally(() => process.exit(0))); listenToShutdownSignals();
process.on('SIGTERM', () => cleanup('SIGTERM').finally(() => process.exit(0)));
// Register handlers for uncaught exceptions and unhandled rejections
process.on('uncaughtException', (err) => {
logger.error(`Uncaught exception: ${err.message}`);
cleanup('uncaughtException').finally(() => process.exit(1));
});
process.on('unhandledRejection', (reason, promise) => {
logger.error(`Unhandled rejection at: ${promise}, reason: ${reason}`);
cleanup('unhandledRejection').finally(() => process.exit(1));
});

View file

@ -5,9 +5,9 @@ import { PosthogEvent, PosthogEventMap } from './posthogEvents.js';
let posthog: PostHog | undefined = undefined; let posthog: PostHog | undefined = undefined;
if (clientEnv.NEXT_PUBLIC_POSTHOG_PAPIK) { if (env.POSTHOG_PAPIK) {
posthog = new PostHog( posthog = new PostHog(
clientEnv.NEXT_PUBLIC_POSTHOG_PAPIK, env.POSTHOG_PAPIK,
{ {
host: "https://us.i.posthog.com", host: "https://us.i.posthog.com",
} }
@ -29,4 +29,6 @@ export function captureEvent<E extends PosthogEvent>(event: E, properties: Posth
}); });
} }
await posthog?.shutdown(); export async function shutdownPosthog() {
await posthog?.shutdown();
}

View file

@ -1,14 +1,6 @@
import express, { Request, Response } from 'express';
import { Server } from 'http';
import client, { Registry, Counter, Gauge } from 'prom-client'; import client, { Registry, Counter, Gauge } from 'prom-client';
import { createLogger } from "@sourcebot/shared";
const logger = createLogger('prometheus-client');
export class PromClient { export class PromClient {
private registry: Registry; public registry: Registry;
private app: express.Application;
private server: Server;
public activeRepoIndexJobs: Gauge<string>; public activeRepoIndexJobs: Gauge<string>;
public pendingRepoIndexJobs: Gauge<string>; public pendingRepoIndexJobs: Gauge<string>;
@ -22,8 +14,6 @@ export class PromClient {
public connectionSyncJobFailTotal: Counter<string>; public connectionSyncJobFailTotal: Counter<string>;
public connectionSyncJobSuccessTotal: Counter<string>; public connectionSyncJobSuccessTotal: Counter<string>;
public readonly PORT = 3060;
constructor() { constructor() {
this.registry = new Registry(); this.registry = new Registry();
@ -100,26 +90,5 @@ export class PromClient {
client.collectDefaultMetrics({ client.collectDefaultMetrics({
register: this.registry, register: this.registry,
}); });
this.app = express();
this.app.get('/metrics', async (req: Request, res: Response) => {
res.set('Content-Type', this.registry.contentType);
const metrics = await this.registry.metrics();
res.end(metrics);
});
this.server = this.app.listen(this.PORT, () => {
logger.info(`Prometheus metrics server is running on port ${this.PORT}`);
});
}
async dispose() {
return new Promise<void>((resolve, reject) => {
this.server.close((err) => {
if (err) reject(err);
else resolve();
});
});
} }
} }

View file

@ -39,8 +39,8 @@ type CompileResult = {
export const compileGithubConfig = async ( export const compileGithubConfig = async (
config: GithubConnectionConfig, config: GithubConnectionConfig,
connectionId: number, connectionId: number,
abortController: AbortController): Promise<CompileResult> => { signal: AbortSignal): Promise<CompileResult> => {
const gitHubReposResult = await getGitHubReposFromConfig(config, abortController.signal); const gitHubReposResult = await getGitHubReposFromConfig(config, signal);
const gitHubRepos = gitHubReposResult.repos; const gitHubRepos = gitHubReposResult.repos;
const warnings = gitHubReposResult.warnings; const warnings = gitHubReposResult.warnings;

View file

@ -7,12 +7,12 @@ import { readdir, rm } from 'fs/promises';
import { Job, Queue, ReservedJob, Worker } from "groupmq"; import { Job, Queue, ReservedJob, Worker } from "groupmq";
import { Redis } from 'ioredis'; import { Redis } from 'ioredis';
import micromatch from 'micromatch'; import micromatch from 'micromatch';
import { INDEX_CACHE_DIR } from './constants.js'; import { GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS, INDEX_CACHE_DIR } from './constants.js';
import { cloneRepository, fetchRepository, getBranches, getCommitHashForRefName, getTags, isPathAValidGitRepoRoot, unsetGitConfig, upsertGitConfig } from './git.js'; import { cloneRepository, fetchRepository, getBranches, getCommitHashForRefName, getTags, isPathAValidGitRepoRoot, unsetGitConfig, upsertGitConfig } from './git.js';
import { captureEvent } from './posthog.js'; import { captureEvent } from './posthog.js';
import { PromClient } from './promClient.js'; import { PromClient } from './promClient.js';
import { RepoWithConnections, Settings } from "./types.js"; import { RepoWithConnections, Settings } from "./types.js";
import { getAuthCredentialsForRepo, getRepoPath, getShardPrefix, groupmqLifecycleExceptionWrapper, measure } from './utils.js'; import { getAuthCredentialsForRepo, getRepoPath, getShardPrefix, groupmqLifecycleExceptionWrapper, measure, setIntervalAsync } from './utils.js';
import { indexGitRepository } from './zoekt.js'; import { indexGitRepository } from './zoekt.js';
const LOG_TAG = 'repo-index-manager'; const LOG_TAG = 'repo-index-manager';
@ -45,7 +45,7 @@ export class RepoIndexManager {
constructor( constructor(
private db: PrismaClient, private db: PrismaClient,
private settings: Settings, private settings: Settings,
redis: Redis, private redis: Redis,
private promClient: PromClient, private promClient: PromClient,
) { ) {
this.queue = new Queue<JobPayload>({ this.queue = new Queue<JobPayload>({
@ -70,11 +70,15 @@ export class RepoIndexManager {
this.worker.on('failed', this.onJobFailed.bind(this)); this.worker.on('failed', this.onJobFailed.bind(this));
this.worker.on('stalled', this.onJobStalled.bind(this)); this.worker.on('stalled', this.onJobStalled.bind(this));
this.worker.on('error', this.onWorkerError.bind(this)); this.worker.on('error', this.onWorkerError.bind(this));
// graceful-timeout is triggered when a job is still processing after
// worker.close() is called and the timeout period has elapsed. In this case,
// we fail the job with no retry.
this.worker.on('graceful-timeout', this.onJobGracefulTimeout.bind(this));
} }
public async startScheduler() { public startScheduler() {
logger.debug('Starting scheduler'); logger.debug('Starting scheduler');
this.interval = setInterval(async () => { this.interval = setIntervalAsync(async () => {
await this.scheduleIndexJobs(); await this.scheduleIndexJobs();
await this.scheduleCleanupJobs(); await this.scheduleCleanupJobs();
}, this.settings.reindexRepoPollingIntervalMs); }, this.settings.reindexRepoPollingIntervalMs);
@ -192,7 +196,7 @@ export class RepoIndexManager {
} }
} }
private async createJobs(repos: Repo[], type: RepoIndexingJobType) { public async createJobs(repos: Repo[], type: RepoIndexingJobType) {
// @note: we don't perform this in a transaction because // @note: we don't perform this in a transaction because
// we want to avoid the situation where a job is created and run // we want to avoid the situation where a job is created and run
// prior to the transaction being committed. // prior to the transaction being committed.
@ -221,6 +225,8 @@ export class RepoIndexManager {
const jobTypeLabel = getJobTypePrometheusLabel(type); const jobTypeLabel = getJobTypePrometheusLabel(type);
this.promClient.pendingRepoIndexJobs.inc({ repo: job.repo.name, type: jobTypeLabel }); this.promClient.pendingRepoIndexJobs.inc({ repo: job.repo.name, type: jobTypeLabel });
} }
return jobs.map(job => job.id);
} }
private async runJob(job: ReservedJob<JobPayload>) { private async runJob(job: ReservedJob<JobPayload>) {
@ -228,6 +234,23 @@ export class RepoIndexManager {
const logger = createJobLogger(id); const logger = createJobLogger(id);
logger.info(`Running ${job.data.type} job ${id} for repo ${job.data.repoName} (id: ${job.data.repoId}) (attempt ${job.attempts + 1} / ${job.maxAttempts})`); logger.info(`Running ${job.data.type} job ${id} for repo ${job.data.repoName} (id: ${job.data.repoId}) (attempt ${job.attempts + 1} / ${job.maxAttempts})`);
const currentStatus = await this.db.repoIndexingJob.findUniqueOrThrow({
where: {
id,
},
select: {
status: true,
}
});
// Fail safe: if the job is not PENDING (first run) or IN_PROGRESS (retry), it indicates the job
// is in an invalid state and should be skipped.
if (
currentStatus.status !== RepoIndexingJobStatus.PENDING &&
currentStatus.status !== RepoIndexingJobStatus.IN_PROGRESS
) {
throw new Error(`Job ${id} is not in a valid state. Expected: ${RepoIndexingJobStatus.PENDING} or ${RepoIndexingJobStatus.IN_PROGRESS}. Actual: ${currentStatus.status}. Skipping.`);
}
const { repo, type: jobType } = await this.db.repoIndexingJob.update({ const { repo, type: jobType } = await this.db.repoIndexingJob.update({
where: { where: {
@ -538,6 +561,28 @@ export class RepoIndexManager {
logger.error(`Job ${jobId} stalled for repo ${repo.name} (id: ${repo.id})`); logger.error(`Job ${jobId} stalled for repo ${repo.name} (id: ${repo.id})`);
}); });
private onJobGracefulTimeout = async (job: Job<JobPayload>) =>
groupmqLifecycleExceptionWrapper('onJobGracefulTimeout', logger, async () => {
const logger = createJobLogger(job.data.jobId);
const jobTypeLabel = getJobTypePrometheusLabel(job.data.type);
const { repo } = await this.db.repoIndexingJob.update({
where: { id: job.data.jobId },
data: {
status: RepoIndexingJobStatus.FAILED,
completedAt: new Date(),
errorMessage: 'Job timed out',
},
select: { repo: true }
});
this.promClient.activeRepoIndexJobs.dec({ repo: job.data.repoName, type: jobTypeLabel });
this.promClient.repoIndexJobFailTotal.inc({ repo: job.data.repoName, type: jobTypeLabel });
logger.error(`Job ${job.data.jobId} timed out for repo ${repo.name} (id: ${repo.id}). Failing job.`);
});
private async onWorkerError(error: Error) { private async onWorkerError(error: Error) {
Sentry.captureException(error); Sentry.captureException(error);
logger.error(`Index syncer worker error.`, error); logger.error(`Index syncer worker error.`, error);
@ -547,8 +592,20 @@ export class RepoIndexManager {
if (this.interval) { if (this.interval) {
clearInterval(this.interval); clearInterval(this.interval);
} }
await this.worker.close(); const inProgressJobs = this.worker.getCurrentJobs();
await this.queue.close(); await this.worker.close(GROUPMQ_WORKER_STOP_GRACEFUL_TIMEOUT_MS);
// Manually release group locks for in progress jobs to prevent deadlocks.
// @see: https://github.com/Openpanel-dev/groupmq/issues/8
for (const { job } of inProgressJobs) {
const lockKey = `groupmq:repo-index-queue:lock:${job.groupId}`;
logger.debug(`Releasing group lock ${lockKey} for in progress job ${job.id}`);
await this.redis.del(lockKey);
}
// @note: As of groupmq v1.0.0, queue.close() will just close the underlying
// redis connection. Since we share the same redis client between, skip this
// step and close the redis client directly in index.ts.
// await this.queue.close();
} }
} }

View file

@ -268,3 +268,27 @@ export const groupmqLifecycleExceptionWrapper = async (name: string, logger: Log
} }
} }
// setInterval wrapper that ensures async callbacks are not executed concurrently:
// if a previous invocation of `target` is still awaiting, the tick is skipped.
// @see: https://mottaquikarim.github.io/dev/posts/setinterval-that-blocks-on-await/
export const setIntervalAsync = (target: () => Promise<void>, pollingIntervalMs: number): NodeJS.Timeout => {
    // Closure-scoped re-entrancy guard. The previous implementation stashed the
    // flag on the target function itself via `(target as any).isRunning`, which
    // defeats type checking and shares state between intervals created from the
    // same function reference. A local flag keeps the guard per-interval.
    let isRunning = false;
    const guardedTarget = async (): Promise<void> => {
        if (isRunning) {
            // A previous invocation is still in flight; skip this tick.
            return;
        }
        isRunning = true;
        try {
            await target();
        } finally {
            // Always release the guard, even if the callback throws/rejects.
            isRunning = false;
        }
    };
    return setInterval(guardedTarget, pollingIntervalMs);
}

View file

@ -1,5 +1,5 @@
import { Repo } from "@sourcebot/db"; import { Repo } from "@sourcebot/db";
import { createLogger } from "@sourcebot/shared"; import { createLogger, env } from "@sourcebot/shared";
import { exec } from "child_process"; import { exec } from "child_process";
import { INDEX_CACHE_DIR } from "./constants.js"; import { INDEX_CACHE_DIR } from "./constants.js";
import { Settings } from "./types.js"; import { Settings } from "./types.js";
@ -11,6 +11,8 @@ export const indexGitRepository = async (repo: Repo, settings: Settings, revisio
const { path: repoPath } = getRepoPath(repo); const { path: repoPath } = getRepoPath(repo);
const shardPrefix = getShardPrefix(repo.orgId, repo.id); const shardPrefix = getShardPrefix(repo.orgId, repo.id);
const largeFileGlobPatterns = env.ALWAYS_INDEX_FILE_PATTERNS?.split(',').map(pattern => pattern.trim()) ?? [];
const command = [ const command = [
'zoekt-git-index', 'zoekt-git-index',
'-allow_missing_branches', '-allow_missing_branches',
@ -21,6 +23,7 @@ export const indexGitRepository = async (repo: Repo, settings: Settings, revisio
`-tenant_id ${repo.orgId}`, `-tenant_id ${repo.orgId}`,
`-repo_id ${repo.id}`, `-repo_id ${repo.id}`,
`-shard_prefix ${shardPrefix}`, `-shard_prefix ${shardPrefix}`,
...largeFileGlobPatterns.map((pattern) => `-large_file ${pattern}`),
repoPath repoPath
].join(' '); ].join(' ');

View file

@ -0,0 +1,5 @@
-- Migration: make Chat.createdById nullable so chats can exist without an owner.
-- First, remove the NOT NULL constraint on the createdById column.
ALTER TABLE "Chat" ALTER COLUMN "createdById" DROP NOT NULL;
-- Then, set all chats created by the guest user (id: 1) to have a NULL createdById.
UPDATE "Chat" SET "createdById" = NULL WHERE "createdById" = '1';

View file

@ -437,8 +437,8 @@ model Chat {
name String? name String?
createdBy User @relation(fields: [createdById], references: [id], onDelete: Cascade) createdBy User? @relation(fields: [createdById], references: [id], onDelete: Cascade)
createdById String createdById String?
createdAt DateTime @default(now()) createdAt DateTime @default(now())
updatedAt DateTime @updatedAt updatedAt DateTime @updatedAt

View file

@ -1 +1,3 @@
import type { User, Account } from ".prisma/client";
export type UserWithAccounts = User & { accounts: Account[] };
export * from ".prisma/client"; export * from ".prisma/client";

View file

@ -7,10 +7,34 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased] ## [Unreleased]
## [1.0.11] - 2025-12-03
### Changed
- Updated API client to match the latest Sourcebot release. [#652](https://github.com/sourcebot-dev/sourcebot/pull/652)
## [1.0.10] - 2025-11-24
### Changed
- Updated API client to match the latest Sourcebot release. [#555](https://github.com/sourcebot-dev/sourcebot/pull/555)
## [1.0.9] - 2025-11-17
### Added
- Added pagination and filtering to `list_repos` tool to handle large repository lists efficiently and prevent oversized responses that waste token context. [#614](https://github.com/sourcebot-dev/sourcebot/pull/614)
## [1.0.8] - 2025-11-10
### Fixed
- Fixed issue where search results exceeding token limits would be completely discarded instead of returning truncated content. [#604](https://github.com/sourcebot-dev/sourcebot/pull/604)
## [1.0.7] - 2025-10-28 ## [1.0.7] - 2025-10-28
### Changed
- Updated API client to match the latest Sourcebot release. [#555](https://github.com/sourcebot-dev/sourcebot/pull/555) - Updated API client to match the latest Sourcebot release. [#555](https://github.com/sourcebot-dev/sourcebot/pull/555)
## [1.0.6] - 2025-09-26 ## [1.0.6] - 2025-09-26
### Fixed
- Fix `linkedConnections is required` schema error. - Fix `linkedConnections is required` schema error.
## [1.0.5] - 2025-09-15 ## [1.0.5] - 2025-09-15

View file

@ -182,7 +182,18 @@ Fetches code that matches the provided regex pattern in `query`.
### list_repos ### list_repos
Lists all repositories indexed by Sourcebot. Lists repositories indexed by Sourcebot with optional filtering and pagination.
<details>
<summary>Parameters</summary>
| Name | Required | Description |
|:-------------|:---------|:--------------------------------------------------------------------|
| `query` | no | Filter repositories by name (case-insensitive). |
| `pageNumber` | no | Page number (1-indexed, default: 1). |
| `limit` | no | Number of repositories per page (default: 50). |
</details>
### get_file_source ### get_file_source

View file

@ -1,6 +1,6 @@
{ {
"name": "@sourcebot/mcp", "name": "@sourcebot/mcp",
"version": "1.0.7", "version": "1.0.11",
"type": "module", "type": "module",
"main": "dist/index.js", "main": "dist/index.js",
"types": "dist/index.d.ts", "types": "dist/index.d.ts",

View file

@ -8,7 +8,6 @@ export const search = async (request: SearchRequest): Promise<SearchResponse | S
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'X-Org-Domain': '~',
...(env.SOURCEBOT_API_KEY ? { 'X-Sourcebot-Api-Key': env.SOURCEBOT_API_KEY } : {}) ...(env.SOURCEBOT_API_KEY ? { 'X-Sourcebot-Api-Key': env.SOURCEBOT_API_KEY } : {})
}, },
body: JSON.stringify(request) body: JSON.stringify(request)
@ -26,7 +25,6 @@ export const listRepos = async (): Promise<ListRepositoriesResponse | ServiceErr
method: 'GET', method: 'GET',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'X-Org-Domain': '~',
...(env.SOURCEBOT_API_KEY ? { 'X-Sourcebot-Api-Key': env.SOURCEBOT_API_KEY } : {}) ...(env.SOURCEBOT_API_KEY ? { 'X-Sourcebot-Api-Key': env.SOURCEBOT_API_KEY } : {})
}, },
}).then(response => response.json()); }).then(response => response.json());
@ -43,7 +41,6 @@ export const getFileSource = async (request: FileSourceRequest): Promise<FileSou
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
'X-Org-Domain': '~',
...(env.SOURCEBOT_API_KEY ? { 'X-Sourcebot-Api-Key': env.SOURCEBOT_API_KEY } : {}) ...(env.SOURCEBOT_API_KEY ? { 'X-Sourcebot-Api-Key': env.SOURCEBOT_API_KEY } : {})
}, },
body: JSON.stringify(request) body: JSON.stringify(request)

View file

@ -7,6 +7,7 @@ import escapeStringRegexp from 'escape-string-regexp';
import { z } from 'zod'; import { z } from 'zod';
import { listRepos, search, getFileSource } from './client.js'; import { listRepos, search, getFileSource } from './client.js';
import { env, numberSchema } from './env.js'; import { env, numberSchema } from './env.js';
import { listReposRequestSchema } from './schemas.js';
import { TextContent } from './types.js'; import { TextContent } from './types.js';
import { isServiceError } from './utils.js'; import { isServiceError } from './utils.js';
@ -69,16 +70,13 @@ server.tool(
query += ` ( lang:${languages.join(' or lang:')} )`; query += ` ( lang:${languages.join(' or lang:')} )`;
} }
if (caseSensitive) {
query += ` case:yes`;
} else {
query += ` case:no`;
}
const response = await search({ const response = await search({
query, query,
matches: env.DEFAULT_MATCHES, matches: env.DEFAULT_MATCHES,
contextLines: env.DEFAULT_CONTEXT_LINES, contextLines: env.DEFAULT_CONTEXT_LINES,
isRegexEnabled: true,
isCaseSensitivityEnabled: caseSensitive,
source: 'mcp'
}); });
if (isServiceError(response)) { if (isServiceError(response)) {
@ -123,6 +121,22 @@ server.tool(
const tokens = text.length / 4; const tokens = text.length / 4;
if ((totalTokens + tokens) > maxTokens) { if ((totalTokens + tokens) > maxTokens) {
// Calculate remaining token budget
const remainingTokens = maxTokens - totalTokens;
if (remainingTokens > 100) { // Only truncate if meaningful space left
// Truncate text to fit remaining tokens (tokens ≈ chars/4)
const maxLength = Math.floor(remainingTokens * 4);
const truncatedText = text.substring(0, maxLength) + "\n\n...[content truncated due to token limit]";
content.push({
type: "text",
text: truncatedText,
});
totalTokens += remainingTokens;
}
isResponseTruncated = true; isResponseTruncated = true;
break; break;
} }
@ -149,8 +163,13 @@ server.tool(
server.tool( server.tool(
"list_repos", "list_repos",
"Lists all repositories in the organization. If you receive an error that indicates that you're not authenticated, please inform the user to set the SOURCEBOT_API_KEY environment variable.", "Lists repositories in the organization with optional filtering and pagination. If you receive an error that indicates that you're not authenticated, please inform the user to set the SOURCEBOT_API_KEY environment variable.",
async () => { listReposRequestSchema.shape,
async ({ query, pageNumber = 1, limit = 50 }: {
query?: string;
pageNumber?: number;
limit?: number;
}) => {
const response = await listRepos(); const response = await listRepos();
if (isServiceError(response)) { if (isServiceError(response)) {
return { return {
@ -161,13 +180,45 @@ server.tool(
}; };
} }
const content: TextContent[] = response.map(repo => { // Apply query filter if provided
let filtered = response;
if (query) {
const lowerQuery = query.toLowerCase();
filtered = response.filter(repo =>
repo.repoName.toLowerCase().includes(lowerQuery) ||
repo.repoDisplayName?.toLowerCase().includes(lowerQuery)
);
}
// Sort alphabetically for consistent pagination
filtered.sort((a, b) => a.repoName.localeCompare(b.repoName));
// Apply pagination
const startIndex = (pageNumber - 1) * limit;
const endIndex = startIndex + limit;
const paginated = filtered.slice(startIndex, endIndex);
// Format output
const content: TextContent[] = paginated.map(repo => {
return { return {
type: "text", type: "text",
text: `id: ${repo.repoName}\nurl: ${repo.webUrl}`, text: `id: ${repo.repoName}\nurl: ${repo.webUrl}`,
} }
}); });
// Add pagination info
if (content.length === 0 && filtered.length > 0) {
content.push({
type: "text",
text: `No results on page ${pageNumber}. Total matching repositories: ${filtered.length}`,
});
} else if (filtered.length > endIndex) {
content.push({
type: "text",
text: `Showing ${paginated.length} repositories (page ${pageNumber}). Total matching: ${filtered.length}. Use pageNumber ${pageNumber + 1} to see more.`,
});
}
return { return {
content, content,
}; };

View file

@ -21,15 +21,18 @@ export const symbolSchema = z.object({
kind: z.string(), kind: z.string(),
}); });
export const searchOptionsSchema = z.object({
matches: z.number(), // The number of matches to return.
contextLines: z.number().optional(), // The number of context lines to return.
whole: z.boolean().optional(), // Whether to return the whole file as part of the response.
isRegexEnabled: z.boolean().optional(), // Whether to enable regular expression search.
isCaseSensitivityEnabled: z.boolean().optional(), // Whether to enable case sensitivity.
});
export const searchRequestSchema = z.object({ export const searchRequestSchema = z.object({
// The zoekt query to execute. query: z.string(), // The zoekt query to execute.
query: z.string(), source: z.string().optional(), // The source of the search request.
// The number of matches to return. ...searchOptionsSchema.shape,
matches: z.number(),
// The number of context lines to return.
contextLines: z.number().optional(),
// Whether to return the whole file as part of the response.
whole: z.boolean().optional(),
}); });
export const repositoryInfoSchema = z.object({ export const repositoryInfoSchema = z.object({
@ -109,7 +112,7 @@ export const searchStatsSchema = z.object({
regexpsConsidered: z.number(), regexpsConsidered: z.number(),
// FlushReason explains why results were flushed. // FlushReason explains why results were flushed.
flushReason: z.number(), flushReason: z.string(),
}); });
export const searchResponseSchema = z.object({ export const searchResponseSchema = z.object({
@ -139,7 +142,6 @@ export const searchResponseSchema = z.object({
content: z.string().optional(), content: z.string().optional(),
})), })),
repositoryInfo: z.array(repositoryInfoSchema), repositoryInfo: z.array(repositoryInfoSchema),
isBranchFilteringEnabled: z.boolean(),
isSearchExhaustive: z.boolean(), isSearchExhaustive: z.boolean(),
}); });
@ -156,6 +158,25 @@ export const repositoryQuerySchema = z.object({
export const listRepositoriesResponseSchema = repositoryQuerySchema.array(); export const listRepositoriesResponseSchema = repositoryQuerySchema.array();
// Input schema for the `list_repos` MCP tool: an optional case-insensitive
// name filter plus 1-indexed pagination parameters. Chain order (describe
// before optional/default) is significant for the emitted JSON schema.
export const listReposRequestSchema = z.object({
    // Substring match applied to repoName / repoDisplayName.
    query: z.string().describe("Filter repositories by name or displayName (case-insensitive)").optional(),
    // Which page of results to return (1-indexed).
    pageNumber: z.number().int().positive().describe("Page number (1-indexed, default: 1)").default(1),
    // Maximum number of repositories returned per page.
    limit: z.number().int().positive().describe("Number of repositories per page (default: 50)").default(50),
});
export const fileSourceRequestSchema = z.object({ export const fileSourceRequestSchema = z.object({
fileName: z.string(), fileName: z.string(),
repository: z.string(), repository: z.string(),

2
packages/queryLanguage/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
/node_modules/
/dist

View file

@ -0,0 +1,20 @@
{
"name": "@sourcebot/query-language",
"private": true,
"main": "dist/index.js",
"scripts": {
"build": "lezer-generator src/query.grammar -o src/parser --typeScript --names && tsc",
"test": "vitest",
"postinstall": "yarn build"
},
"devDependencies": {
"@lezer/generator": "^1.8.0",
"tsx": "^4.19.1",
"typescript": "^5.7.3",
"vitest": "^2.1.9"
},
"dependencies": {
"@lezer/common": "^1.3.0",
"@lezer/lr": "^1.4.3"
}
}

View file

@ -0,0 +1,7 @@
// Barrel module for the query-language package: re-exports the generated
// lezer parser and its numeric term constants, plus convenience type aliases.
import { parser } from "./parser";

// Tree/SyntaxNode aliases derived from the generated parser's own types, so
// consumers don't need a direct dependency on @lezer/common.
type Tree = ReturnType<typeof parser.parse>;
type SyntaxNode = Tree['topNode'];

export type { Tree, SyntaxNode };
export * from "./parser";
export * from "./parser.terms";

View file

@ -0,0 +1,22 @@
// This file was generated by lezer-generator. You probably shouldn't edit it.
// Numeric node-type IDs for the query grammar (see src/query.grammar); used
// when walking parse trees to identify node kinds by number.
export const
  negate = 23,
  Program = 1,
  OrExpr = 2,
  AndExpr = 3,
  NegateExpr = 4,
  PrefixExpr = 5,
  ArchivedExpr = 6,
  RevisionExpr = 7,
  ContentExpr = 8,
  ContextExpr = 9,
  FileExpr = 10,
  ForkExpr = 11,
  VisibilityExpr = 12,
  RepoExpr = 13,
  LangExpr = 14,
  SymExpr = 15,
  RepoSetExpr = 16,
  ParenExpr = 17,
  QuotedTerm = 18,
  Term = 19

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,102 @@
// Lezer grammar for the Sourcebot search query language. Queries combine bare
// terms (implicit AND via adjacency), the `or` operator, `-` negation,
// parenthesized groups, quoted strings, and `key:value` prefix expressions.
// Negation is recognized by an external tokenizer (see tokens.ts) so that a
// plain leading dash in a term is not treated as negation.
@external tokens negateToken from "./tokens" { negate }

@top Program { query }

// `or` binds loosest; adjacency (AndExpr) binds tighter; negation tightest.
@precedence {
  negate,
  and,
  or @left
}

query {
  OrExpr |
  AndExpr |
  expr
}

OrExpr { andExpr (or andExpr)+ }
AndExpr { expr expr+ }
andExpr { AndExpr | expr }

expr {
  NegateExpr |
  ParenExpr |
  PrefixExpr |
  QuotedTerm |
  Term
}

// Negation applies only to prefix expressions and groups.
NegateExpr { !negate negate (PrefixExpr | ParenExpr) }
ParenExpr { "(" query? ")" }

PrefixExpr {
  ArchivedExpr |
  RevisionExpr |
  ContentExpr |
  ContextExpr |
  FileExpr |
  ForkExpr |
  VisibilityExpr |
  RepoExpr |
  LangExpr |
  SymExpr |
  RepoSetExpr
}

// Free-form prefixes: the value may be a bare word or a quoted string.
RevisionExpr { revisionKw value }
ContentExpr { contentKw value }
ContextExpr { contextKw value }
FileExpr { fileKw value }
RepoExpr { repoKw value }
LangExpr { langKw value }
SymExpr { symKw value }
RepoSetExpr { reposetKw value }

// Modifiers
ArchivedExpr { archivedKw archivedValue }
ForkExpr { forkKw forkValue }
VisibilityExpr { visibilityKw visibilityValue }

archivedValue { "yes" | "no" | "only" }
forkValue { "yes" | "no" | "only" }
visibilityValue { "public" | "private" | "any" }

QuotedTerm { quotedString }
Term { word }
value { quotedString | word }

@skip { space }

@tokens {
  archivedKw { "archived:" }
  revisionKw { "rev:" }
  contentKw { "content:" | "c:" }
  contextKw { "context:" }
  fileKw { "file:" | "f:" }
  forkKw { "fork:" }
  visibilityKw { "visibility:" }
  repoKw { "repo:" | "r:" }
  langKw { "lang:" }
  symKw { "sym:" }
  reposetKw { "reposet:" }
  or { "or" ![a-zA-Z0-9_] }
  quotedString { '"' (!["\\\n] | "\\" _)* '"' }
  word { (![ \t\n()]) (![ \t\n():] | ":" | "-")* }
  space { $[ \t\n]+ }

  // Keyword and quoted-string tokens take precedence over the generic word
  // token, so e.g. `file:` is never consumed as the start of a plain term.
  @precedence {
    quotedString,
    archivedKw, revisionKw, contentKw, contextKw, fileKw,
    forkKw, visibilityKw, repoKw, langKw,
    symKw, reposetKw, or,
    word
  }
}

View file

@ -0,0 +1,59 @@
import { ExternalTokenizer } from "@lezer/lr";
import { negate } from "./parser.terms";
// External tokenizer for negation.
// A leading `-` is only tokenized as `negate` when it is followed (after
// optional whitespace) by an opening paren or by a prefix keyword — detected
// as a run of non-delimiter characters ending in `:`. Any other dash is left
// to the grammar's `word` token so terms like `-test` or `foo-bar` still parse.
export const negateToken = new ExternalTokenizer((input) => {
    if (input.next !== 45 /* '-' */) return; // Not a dash
    const startPos = input.pos;

    // Look ahead to see what follows the dash
    input.advance();

    // Skip whitespace (space, tab, newline)
    let ch = input.next;
    while (ch === 32 || ch === 9 || ch === 10) {
        input.advance();
        ch = input.next;
    }

    // Check if followed by opening paren
    if (ch === 40 /* '(' */) {
        // Negative offset: accept only the single dash character; the
        // lookahead characters will be re-scanned by subsequent tokens.
        input.acceptToken(negate, -input.pos + startPos + 1); // Accept just the dash
        return;
    }

    // Check if followed by a prefix keyword (by checking for keyword followed by colon)
    // Look ahead until we hit a delimiter or colon
    const checkPos = input.pos;
    let foundColon = false;

    // Look ahead until we hit a delimiter or colon (ch < 0 signals end of input)
    while (ch >= 0) {
        if (ch === 58 /* ':' */) {
            foundColon = true;
            break;
        }
        // Hit a delimiter (whitespace, paren, or quote) - not a prefix keyword
        if (ch === 32 || ch === 9 || ch === 10 || ch === 40 || ch === 41 || ch === 34) {
            break;
        }
        input.advance();
        ch = input.next;
    }

    // Reset position
    // NOTE(review): relies on `input.advance(-1)` stepping backwards; confirm
    // this rewind is supported by @lezer/lr's InputStream API.
    while (input.pos > checkPos) {
        input.advance(-1);
    }

    if (foundColon) {
        // It's a prefix keyword, accept as negate
        input.acceptToken(negate, -input.pos + startPos + 1);
        return;
    }

    // Otherwise, don't tokenize as negate (let word handle it)
});

View file

@ -0,0 +1,72 @@
# Single term
hello
==>
Program(Term)
# Multiple terms
hello world
==>
Program(AndExpr(Term,Term))
# Multiple terms with various characters
console.log error_handler
==>
Program(AndExpr(Term,Term))
# Term with underscores
my_variable_name
==>
Program(Term)
# Term with dots
com.example.package
==>
Program(Term)
# Term with numbers
func123 test_456
==>
Program(AndExpr(Term,Term))
# Regex pattern
[a-z]+
==>
Program(Term)
# Wildcard pattern
test.*
==>
Program(Term)
# Multiple regex patterns
\w+ [0-9]+ \s*
==>
Program(AndExpr(Term,Term,Term))

View file

@ -0,0 +1,21 @@
import { parser } from "../src/parser";
import { fileTests } from "@lezer/generator/dist/test";
import { describe, it } from "vitest";
import { fileURLToPath } from "url"
import * as fs from "fs";
import * as path from "path";
// Discover every `.txt` fixture next to this test file and register one vitest
// suite per fixture, with one test case per entry parsed by lezer's fileTests.
const fixtureDir = path.dirname(fileURLToPath(import.meta.url));

fs.readdirSync(fixtureDir)
    .filter((fileName) => fileName.endsWith(".txt"))
    .forEach((fileName) => {
        // Suite name is the fixture's basename up to the first dot.
        const suiteName = /^[^\.]*/.exec(fileName)?.[0] ?? "unknown";
        const fixtureText = fs.readFileSync(path.join(fixtureDir, fileName), "utf8");
        describe(suiteName, () => {
            for (const { name: caseName, run } of fileTests(fixtureText, fileName)) {
                it(caseName, () => run(parser));
            }
        });
    });

View file

@ -0,0 +1,120 @@
# Empty parentheses
()
==>
Program(ParenExpr)
# Simple grouping
(test)
==>
Program(ParenExpr(Term))
# Multiple terms in group
(hello world)
==>
Program(ParenExpr(AndExpr(Term,Term)))
# Nested parentheses
((test))
==>
Program(ParenExpr(ParenExpr(Term)))
# Multiple groups
(first) (second)
==>
Program(AndExpr(ParenExpr(Term),ParenExpr(Term)))
# Group with multiple terms
(one two three)
==>
Program(ParenExpr(AndExpr(Term,Term,Term)))
# Mixed grouped and ungrouped
test (grouped) another
==>
Program(AndExpr(Term,ParenExpr(Term),Term))
# Deeply nested
(((nested)))
==>
Program(ParenExpr(ParenExpr(ParenExpr(Term))))
# Multiple nested groups
((a b) (c d))
==>
Program(ParenExpr(AndExpr(ParenExpr(AndExpr(Term,Term)),ParenExpr(AndExpr(Term,Term)))))
# Group at start
(start) middle end
==>
Program(AndExpr(ParenExpr(Term),Term,Term))
# Group at end
start middle (end)
==>
Program(AndExpr(Term,Term,ParenExpr(Term)))
# Complex grouping pattern
(a (b c) d)
==>
Program(ParenExpr(AndExpr(Term,ParenExpr(AndExpr(Term,Term)),Term)))
# Sequential groups
(a)(b)(c)
==>
Program(AndExpr(ParenExpr(Term),ParenExpr(Term),ParenExpr(Term)))
# Group with regex
([a-z]+)
==>
Program(ParenExpr(Term))
# Group with dots
(com.example.test)
==>
Program(ParenExpr(Term))

View file

@ -0,0 +1,255 @@
# Literal dash term
-test
==>
Program(Term)
# Quoted dash term
"-excluded"
==>
Program(QuotedTerm)
# Dash in middle
test-case
==>
Program(Term)
# Multiple dash terms
-one -two -three
==>
Program(AndExpr(Term,Term,Term))
# Negate file prefix
-file:test.js
==>
Program(NegateExpr(PrefixExpr(FileExpr)))
# Negate repo prefix
-repo:archived
==>
Program(NegateExpr(PrefixExpr(RepoExpr)))
# Negate lang prefix
-lang:python
==>
Program(NegateExpr(PrefixExpr(LangExpr)))
# Negate content prefix
-content:TODO
==>
Program(NegateExpr(PrefixExpr(ContentExpr)))
# Negate revision prefix
-rev:develop
==>
Program(NegateExpr(PrefixExpr(RevisionExpr)))
# Negate archived prefix
-archived:yes
==>
Program(NegateExpr(PrefixExpr(ArchivedExpr)))
# Negate fork prefix
-fork:yes
==>
Program(NegateExpr(PrefixExpr(ForkExpr)))
# Negate visibility prefix
-visibility:any
==>
Program(NegateExpr(PrefixExpr(VisibilityExpr)))
# Negate context prefix
-context:backend
==>
Program(NegateExpr(PrefixExpr(ContextExpr)))
# Negate symbol prefix
-sym:OldClass
==>
Program(NegateExpr(PrefixExpr(SymExpr)))
# Negate parentheses
-(test)
==>
Program(NegateExpr(ParenExpr(Term)))
# Negate group with multiple terms
-(test exclude)
==>
Program(NegateExpr(ParenExpr(AndExpr(Term,Term))))
# Negate group with prefix
-(file:test.js console.log)
==>
Program(NegateExpr(ParenExpr(AndExpr(PrefixExpr(FileExpr),Term))))
# Prefix with negated term
file:test.js -console
==>
Program(AndExpr(PrefixExpr(FileExpr),Term))
# Multiple prefixes with negation
file:test.js -lang:python
==>
Program(AndExpr(PrefixExpr(FileExpr),NegateExpr(PrefixExpr(LangExpr))))
# Complex negation pattern
function -file:test.js -lang:java
==>
Program(AndExpr(Term,NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(LangExpr))))
# Negation inside parentheses
(-file:test.js)
==>
Program(ParenExpr(NegateExpr(PrefixExpr(FileExpr))))
# Multiple negations in group
(-file:a.js -lang:python)
==>
Program(ParenExpr(AndExpr(NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(LangExpr)))))
# Mixed in parentheses
(include -file:test.js)
==>
Program(ParenExpr(AndExpr(Term,NegateExpr(PrefixExpr(FileExpr)))))
# Negate nested group
-((file:test.js))
==>
Program(NegateExpr(ParenExpr(ParenExpr(PrefixExpr(FileExpr)))))
# Negate short form prefix
-f:test.js
==>
Program(NegateExpr(PrefixExpr(FileExpr)))
# Negate short form repo
-r:myrepo
==>
Program(NegateExpr(PrefixExpr(RepoExpr)))
# Negate short form content
-c:console
==>
Program(NegateExpr(PrefixExpr(ContentExpr)))
# Negate with prefix in quotes
-file:"test file.js"
==>
Program(NegateExpr(PrefixExpr(FileExpr)))
# Complex with multiple negated prefixes
lang:typescript -file:*.test.ts -file:*.spec.ts
==>
Program(AndExpr(PrefixExpr(LangExpr),NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(FileExpr))))
# Negated group with prefix
-(file:test.js lang:python)
==>
Program(NegateExpr(ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr)))))
# Negate empty group
-()
==>
Program(NegateExpr(ParenExpr))
# Negate with space after dash
- file:test.js
==>
Program(NegateExpr(PrefixExpr(FileExpr)))

View file

@ -0,0 +1,271 @@
# Simple OR
test or example
==>
Program(OrExpr(Term,Term))
# Multiple OR
one or two or three
==>
Program(OrExpr(Term,Term,Term))
# OR with prefixes
file:test.js or file:example.js
==>
Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(FileExpr)))
# OR with negation
test or -file:excluded.js
==>
Program(OrExpr(Term,NegateExpr(PrefixExpr(FileExpr))))
# OR with quoted strings
"first option" or "second option"
==>
Program(OrExpr(QuotedTerm,QuotedTerm))
# OR with different prefixes
lang:python or lang:javascript
==>
Program(OrExpr(PrefixExpr(LangExpr),PrefixExpr(LangExpr)))
# Multiple terms with OR
function test or class example
==>
Program(OrExpr(AndExpr(Term,Term),AndExpr(Term,Term)))
# OR in parentheses
(test or example)
==>
Program(ParenExpr(OrExpr(Term,Term)))
# OR with parentheses outside
(test) or (example)
==>
Program(OrExpr(ParenExpr(Term),ParenExpr(Term)))
# Complex OR with grouping
(file:*.js lang:javascript) or (file:*.ts lang:typescript)
==>
Program(OrExpr(ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr))),ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr)))))
# OR with mixed content
test or file:example.js
==>
Program(OrExpr(Term,PrefixExpr(FileExpr)))
# Prefix OR term
file:test.js or example
==>
Program(OrExpr(PrefixExpr(FileExpr),Term))
# OR with short form prefixes
f:test.js or r:myrepo
==>
Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr)))
# OR with repo prefixes
repo:project1 or repo:project2
==>
Program(OrExpr(PrefixExpr(RepoExpr),PrefixExpr(RepoExpr)))
# OR with revision prefixes
rev:main or rev:develop
==>
Program(OrExpr(PrefixExpr(RevisionExpr),PrefixExpr(RevisionExpr)))
# OR with lang prefixes
lang:rust or lang:go
==>
Program(OrExpr(PrefixExpr(LangExpr),PrefixExpr(LangExpr)))
# OR with content
content:TODO or content:FIXME
==>
Program(OrExpr(PrefixExpr(ContentExpr),PrefixExpr(ContentExpr)))
# OR with negated terms
-file:test.js or -file:spec.js
==>
Program(OrExpr(NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(FileExpr))))
# OR in nested parentheses
((a or b) or (c or d))
==>
Program(ParenExpr(OrExpr(ParenExpr(OrExpr(Term,Term)),ParenExpr(OrExpr(Term,Term)))))
# Multiple OR with parentheses and implicit AND
(a or b) and (c or d)
==>
Program(AndExpr(ParenExpr(OrExpr(Term,Term)),Term,ParenExpr(OrExpr(Term,Term))))
# OR with wildcards
*.test.js or *.spec.js
==>
Program(OrExpr(Term,Term))
# OR with regex patterns
[a-z]+ or [0-9]+
==>
Program(OrExpr(Term,Term))
# OR with dots
com.example.test or org.example.test
==>
Program(OrExpr(Term,Term))
# OR with dashes
test-one or test-two
==>
Program(OrExpr(Term,Term))
# Word containing 'or'
order
==>
Program(Term)
# Word containing 'or' in middle
before
==>
Program(Term)
# OR at start
or test
==>
Program(⚠,Term)
# OR at end (or becomes term)
test or
==>
Program(AndExpr(Term,Term))
# Multiple consecutive OR
test or or example
==>
Program(OrExpr(Term,⚠,Term))
# OR with all prefix types
file:*.js or repo:myrepo or lang:javascript
==>
Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr),PrefixExpr(LangExpr)))
# Complex query with OR and negation
(lang:python or lang:ruby) -file:test.py
==>
Program(AndExpr(ParenExpr(OrExpr(PrefixExpr(LangExpr),PrefixExpr(LangExpr))),NegateExpr(PrefixExpr(FileExpr))))
# OR with quoted prefix values
file:"test one.js" or file:"test two.js"
==>
Program(OrExpr(PrefixExpr(FileExpr),PrefixExpr(FileExpr)))
# OR with empty parentheses
() or ()
==>
Program(OrExpr(ParenExpr,ParenExpr))
# OR with negated groups
-(file:a.js) or -(file:b.js)
==>
Program(OrExpr(NegateExpr(ParenExpr(PrefixExpr(FileExpr))),NegateExpr(ParenExpr(PrefixExpr(FileExpr)))))

View file

@ -0,0 +1,200 @@
# OR has lowest precedence - implicit AND groups first
a b or c d
==>
Program(OrExpr(AndExpr(Term,Term),AndExpr(Term,Term)))
# Multiple OR operators are left-associative
a or b or c
==>
Program(OrExpr(Term,Term,Term))
# AND before OR
file:test.js error or file:test.go panic
==>
Program(OrExpr(AndExpr(PrefixExpr(FileExpr),Term),AndExpr(PrefixExpr(FileExpr),Term)))
# Negation binds tighter than AND
-file:test.js error
==>
Program(AndExpr(NegateExpr(PrefixExpr(FileExpr)),Term))
# Negation binds tighter than OR
-file:a.js or file:b.js
==>
Program(OrExpr(NegateExpr(PrefixExpr(FileExpr)),PrefixExpr(FileExpr)))
# Parentheses override precedence
(a or b) c
==>
Program(AndExpr(ParenExpr(OrExpr(Term,Term)),Term))
# Parentheses override - OR inside parens groups first
a (b or c)
==>
Program(AndExpr(Term,ParenExpr(OrExpr(Term,Term))))
# Complex: AND, OR, and negation
a -b or c d
==>
Program(OrExpr(AndExpr(Term,Term),AndExpr(Term,Term)))
# Negated group in OR expression
-(a b) or c
==>
Program(OrExpr(NegateExpr(ParenExpr(AndExpr(Term,Term))),Term))
# Multiple negations in OR
-file:a.js or -file:b.js or file:c.js
==>
Program(OrExpr(NegateExpr(PrefixExpr(FileExpr)),NegateExpr(PrefixExpr(FileExpr)),PrefixExpr(FileExpr)))
# Prefix binds to its value only
file:a.js b.js
==>
Program(AndExpr(PrefixExpr(FileExpr),Term))
# OR with prefixes and terms mixed
repo:backend error or repo:frontend warning
==>
Program(OrExpr(AndExpr(PrefixExpr(RepoExpr),Term),AndExpr(PrefixExpr(RepoExpr),Term)))
# Nested parentheses with OR
((a or b) c) or d
==>
Program(OrExpr(ParenExpr(AndExpr(ParenExpr(OrExpr(Term,Term)),Term)),Term))
# OR at different nesting levels
(a or (b or c))
==>
Program(ParenExpr(OrExpr(Term,ParenExpr(OrExpr(Term,Term)))))
# Implicit AND groups all adjacent terms before OR
a b c or d e f
==>
Program(OrExpr(AndExpr(Term,Term,Term),AndExpr(Term,Term,Term)))
# Mixed prefix and regular terms with OR
lang:go func or lang:rust fn
==>
Program(OrExpr(AndExpr(PrefixExpr(LangExpr),Term),AndExpr(PrefixExpr(LangExpr),Term)))
# Negation doesn't affect OR grouping
a or -b or c
==>
Program(OrExpr(Term,Term,Term))
# Parentheses can isolate OR from surrounding AND
a (b or c) d
==>
Program(AndExpr(Term,ParenExpr(OrExpr(Term,Term)),Term))
# Multiple parenthesized groups with AND
(a or b) (c or d)
==>
Program(AndExpr(ParenExpr(OrExpr(Term,Term)),ParenExpr(OrExpr(Term,Term))))
# Quoted strings are atomic - no precedence inside
"a or b"
==>
Program(QuotedTerm)
# Prefix with OR value doesn't split
file:"a.js or b.js"
==>
Program(PrefixExpr(FileExpr))
# Negated prefix in complex expression
-file:test.js lang:go error or warning
==>
Program(OrExpr(AndExpr(NegateExpr(PrefixExpr(FileExpr)),PrefixExpr(LangExpr),Term),Term))
# OR followed by parenthesized AND
a or (b c)
==>
Program(OrExpr(Term,ParenExpr(AndExpr(Term,Term))))
# Empty parens don't affect precedence
() or a b
==>
Program(OrExpr(ParenExpr,AndExpr(Term,Term)))
# Negation of empty group
-() a
==>
Program(AndExpr(NegateExpr(ParenExpr),Term))

View file

@ -0,0 +1,336 @@
# File prefix
file:README.md
==>
Program(PrefixExpr(FileExpr))
# File prefix short form
f:index.ts
==>
Program(PrefixExpr(FileExpr))
# Repo prefix
repo:myproject
==>
Program(PrefixExpr(RepoExpr))
# Repo prefix short form
r:github.com/user/repo
==>
Program(PrefixExpr(RepoExpr))
# Content prefix
content:function
==>
Program(PrefixExpr(ContentExpr))
# Content prefix short form
c:console.log
==>
Program(PrefixExpr(ContentExpr))
# Revision prefix
rev:main
==>
Program(PrefixExpr(RevisionExpr))
# Lang prefix
lang:typescript
==>
Program(PrefixExpr(LangExpr))
# Archived prefix - no
archived:no
==>
Program(PrefixExpr(ArchivedExpr))
# Archived prefix - only
archived:only
==>
Program(PrefixExpr(ArchivedExpr))
# Fork prefix - yes
fork:yes
==>
Program(PrefixExpr(ForkExpr))
# Fork prefix - only
fork:only
==>
Program(PrefixExpr(ForkExpr))
# Visibility prefix - public
visibility:public
==>
Program(PrefixExpr(VisibilityExpr))
# Context prefix
context:web
==>
Program(PrefixExpr(ContextExpr))
# Symbol prefix
sym:MyClass
==>
Program(PrefixExpr(SymExpr))
# RepoSet prefix
reposet:repo1,repo2
==>
Program(PrefixExpr(RepoSetExpr))
# File with wildcard
file:*.ts
==>
Program(PrefixExpr(FileExpr))
# File with path
file:src/components/Button.tsx
==>
Program(PrefixExpr(FileExpr))
# Repo with full URL
repo:github.com/org/project
==>
Program(PrefixExpr(RepoExpr))
# Multiple prefixes
file:test.js repo:myproject
==>
Program(AndExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr)))
# Prefix with term
file:test.js console.log
==>
Program(AndExpr(PrefixExpr(FileExpr),Term))
# Term then prefix
console.log file:handler.ts
==>
Program(AndExpr(Term,PrefixExpr(FileExpr)))
# Multiple prefixes and terms
lang:typescript function file:handler.ts
==>
Program(AndExpr(PrefixExpr(LangExpr),Term,PrefixExpr(FileExpr)))
# Prefix with regex pattern
file:[a-z]+\.test\.js
==>
Program(PrefixExpr(FileExpr))
# Content with spaces in value (no quotes)
content:hello
==>
Program(PrefixExpr(ContentExpr))
# Revision with slashes
rev:feature/new-feature
==>
Program(PrefixExpr(RevisionExpr))
# RepoSet with multiple repos
reposet:repo1,repo2,repo3
==>
Program(PrefixExpr(RepoSetExpr))
# Symbol with dots
sym:package.Class.method
==>
Program(PrefixExpr(SymExpr))
# Lang with various languages
lang:python
==>
Program(PrefixExpr(LangExpr))
# Archived prefix - yes
archived:yes
==>
Program(PrefixExpr(ArchivedExpr))
# Archived prefix - invalid value (error case)
archived:invalid
==>
Program(AndExpr(PrefixExpr(ArchivedExpr(⚠)),Term))
# Fork prefix - no
fork:no
==>
Program(PrefixExpr(ForkExpr))
# Fork prefix - invalid value (error case)
fork:invalid
==>
Program(AndExpr(PrefixExpr(ForkExpr(⚠)),Term))
# Visibility prefix - private
visibility:private
==>
Program(PrefixExpr(VisibilityExpr))
# Visibility prefix - any
visibility:any
==>
Program(PrefixExpr(VisibilityExpr))
# Visibility prefix - invalid value (error case)
visibility:invalid
==>
Program(AndExpr(PrefixExpr(VisibilityExpr(⚠)),Term))
# File with dashes
file:my-component.tsx
==>
Program(PrefixExpr(FileExpr))
# Repo with numbers
repo:project123
==>
Program(PrefixExpr(RepoExpr))
# Content with special chars
content:@Component
==>
Program(PrefixExpr(ContentExpr))
# Context with underscores
context:data_engineering
==>
Program(PrefixExpr(ContextExpr))
# Prefix in parentheses
(file:test.js)
==>
Program(ParenExpr(PrefixExpr(FileExpr)))
# Multiple prefixes in group
(file:*.ts lang:typescript)
==>
Program(ParenExpr(AndExpr(PrefixExpr(FileExpr),PrefixExpr(LangExpr))))

View file

@ -0,0 +1,503 @@
# Simple quoted string
"hello"
==>
Program(QuotedTerm)
# Leading unclosed quote
"hello
==>
Program(Term)
# Trailing unclosed quote
hello"
==>
Program(Term)
# Quoted string with spaces
"hello world"
==>
Program(QuotedTerm)
# Multiple words in quotes
"this is a search term"
==>
Program(QuotedTerm)
# Quoted string with escaped quote
"hello \"world\""
==>
Program(QuotedTerm)
# Quoted string with escaped backslash
"path\\to\\file"
==>
Program(QuotedTerm)
# Double backslash
"test\\\\path"
==>
Program(QuotedTerm)
# Multiple escaped quotes
"\"quoted\" \"words\""
==>
Program(QuotedTerm)
# Mixed escaped characters
"test\\nvalue\"quoted"
==>
Program(QuotedTerm)
# Empty quoted string
""
==>
Program(QuotedTerm)
# Quoted string with only spaces
" "
==>
Program(QuotedTerm)
# Quoted string in file prefix
file:"my file.txt"
==>
Program(PrefixExpr(FileExpr))
# Quoted string in repo prefix
repo:"github.com/user/repo name"
==>
Program(PrefixExpr(RepoExpr))
# Quoted string in content prefix
content:"console.log"
==>
Program(PrefixExpr(ContentExpr))
# Quoted string in revision prefix
rev:"feature/my feature"
==>
Program(PrefixExpr(RevisionExpr))
# Multiple quoted strings
"first string" "second string"
==>
Program(AndExpr(QuotedTerm,QuotedTerm))
# Quoted and unquoted mixed
unquoted "quoted string" another
==>
Program(AndExpr(Term,QuotedTerm,Term))
# Quoted string with parentheses inside
"(test)"
==>
Program(QuotedTerm)
# Quoted string with brackets
"[a-z]+"
==>
Program(QuotedTerm)
# Quoted string with special chars
"test@example.com"
==>
Program(QuotedTerm)
# Quoted string with colons
"key:value"
==>
Program(QuotedTerm)
# Quoted string with dashes
"test-case-example"
==>
Program(QuotedTerm)
# Quoted string with dots
"com.example.package"
==>
Program(QuotedTerm)
# Quoted string with regex pattern
"\\w+\\s*=\\s*\\d+"
==>
Program(QuotedTerm)
# Quoted string with forward slashes
"path/to/file"
==>
Program(QuotedTerm)
# Quoted string with underscores
"my_variable_name"
==>
Program(QuotedTerm)
# Quoted string with numbers
"test123"
==>
Program(QuotedTerm)
# Quoted string with mixed case
"CamelCaseTest"
==>
Program(QuotedTerm)
# Quoted prefix value with spaces
file:"test file.js"
==>
Program(PrefixExpr(FileExpr))
# Multiple prefixes with quoted values
file:"my file.txt" repo:"my repo"
==>
Program(AndExpr(PrefixExpr(FileExpr),PrefixExpr(RepoExpr)))
# Quoted string in parentheses
("quoted term")
==>
Program(ParenExpr(QuotedTerm))
# Multiple quoted in parentheses
("first" "second")
==>
Program(ParenExpr(AndExpr(QuotedTerm,QuotedTerm)))
# Quoted with escaped newline
"line1\\nline2"
==>
Program(QuotedTerm)
# Quoted with tab character
"value\\ttab"
==>
Program(QuotedTerm)
# Lang prefix with quoted value
lang:"objective-c"
==>
Program(PrefixExpr(LangExpr))
# Sym prefix with quoted value
sym:"My Class"
==>
Program(PrefixExpr(SymExpr))
# Content with quoted phrase
content:"TODO: fix this"
==>
Program(PrefixExpr(ContentExpr))
# Quoted string with at symbol
"@decorator"
==>
Program(QuotedTerm)
# Quoted string with hash
"#define"
==>
Program(QuotedTerm)
# Quoted string with dollar sign
"$variable"
==>
Program(QuotedTerm)
# Quoted string with percent
"100%"
==>
Program(QuotedTerm)
# Quoted string with ampersand
"foo&bar"
==>
Program(QuotedTerm)
# Quoted string with asterisk
"test*"
==>
Program(QuotedTerm)
# Quoted string with plus
"a+b"
==>
Program(QuotedTerm)
# Quoted string with equals
"a=b"
==>
Program(QuotedTerm)
# Quoted string with angle brackets
"<template>"
==>
Program(QuotedTerm)
# Quoted string with pipe
"a|b"
==>
Program(QuotedTerm)
# Quoted string with tilde
"~/.config"
==>
Program(QuotedTerm)
# Quoted string with backtick
"`code`"
==>
Program(QuotedTerm)
# Quoted string with question mark
"what?"
==>
Program(QuotedTerm)
# Quoted string with exclamation
"important!"
==>
Program(QuotedTerm)
# Quoted string with semicolon
"stmt;"
==>
Program(QuotedTerm)
# Quoted string with comma
"a,b,c"
==>
Program(QuotedTerm)
# Multiple quotes in content
content:"function \"test\" {"
==>
Program(PrefixExpr(ContentExpr))
# Quoted prefix keyword becomes literal
"repo:hello"
==>
Program(QuotedTerm)
# Quoted file prefix as literal
"file:test.js"
==>
Program(QuotedTerm)
# Quoted lang prefix as literal
"lang:python"
==>
Program(QuotedTerm)
# Quoted partial prefix
"repo:"
==>
Program(QuotedTerm)
# Mix of quoted prefix and real prefix
"repo:test" file:actual.js
==>
Program(AndExpr(QuotedTerm,PrefixExpr(FileExpr)))
# Quoted short form prefix
"f:test"
==>
Program(QuotedTerm)
# Quoted revision prefix
"rev:main"
==>
Program(QuotedTerm)
# Quotes can be used within words
name\s*=\s*"projectmanagementlugapi lang:HCL
==>
Program(AndExpr(Term,PrefixExpr(LangExpr)))

View file

@ -0,0 +1,23 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "Node16",
"moduleResolution": "Node16",
"lib": ["ES2023"],
"outDir": "dist",
"rootDir": "src",
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"strict": true,
"noImplicitAny": true,
"strictNullChecks": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"skipLibCheck": true,
"isolatedModules": true,
"resolveJsonModule": true
},
"include": ["src/index.ts"],
"exclude": ["node_modules", "dist"]
}

View file

@ -0,0 +1,8 @@
import { defineConfig } from 'vitest/config';

// Vitest configuration: run tests in a Node environment with watch mode
// disabled (single-run behavior, suitable for CI).
export default defineConfig({
    test: {
        environment: 'node',
        watch: false,
    }
});

View file

@ -646,6 +646,115 @@ const schema = {
"purpose", "purpose",
"audience" "audience"
] ]
},
"AuthentikIdentityProviderConfig": {
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
}, },
"oneOf": [ "oneOf": [
@ -1292,6 +1401,115 @@ const schema = {
"purpose", "purpose",
"audience" "audience"
] ]
},
{
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
] ]
} as const; } as const;

View file

@ -7,7 +7,8 @@ export type IdentityProviderConfig =
| OktaIdentityProviderConfig | OktaIdentityProviderConfig
| KeycloakIdentityProviderConfig | KeycloakIdentityProviderConfig
| MicrosoftEntraIDIdentityProviderConfig | MicrosoftEntraIDIdentityProviderConfig
| GCPIAPIdentityProviderConfig; | GCPIAPIdentityProviderConfig
| AuthentikIdentityProviderConfig;
export interface GitHubIdentityProviderConfig { export interface GitHubIdentityProviderConfig {
provider: "github"; provider: "github";
@ -255,3 +256,46 @@ export interface GCPIAPIdentityProviderConfig {
googleCloudSecret: string; googleCloudSecret: string;
}; };
} }
/**
 * Identity provider configuration for Authentik SSO.
 *
 * Each credential (`clientId`, `clientSecret`, `issuer`) is supplied
 * indirectly: either via the name of an environment variable (`env`) or via a
 * Google Cloud Secret Manager resource name (`googleCloudSecret`).
 *
 * NOTE(review): this shape mirrors the generated JSON schema; presumably it
 * is emitted by a schema-to-TS generator — edit the schema, not this file.
 */
export interface AuthentikIdentityProviderConfig {
  provider: "authentik";
  purpose: "sso";
  clientId:
    | {
        /**
         * The name of the environment variable that contains the token.
         */
        env: string;
      }
    | {
        /**
         * The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets
         */
        googleCloudSecret: string;
      };
  clientSecret:
    | {
        /**
         * The name of the environment variable that contains the token.
         */
        env: string;
      }
    | {
        /**
         * The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets
         */
        googleCloudSecret: string;
      };
  issuer:
    | {
        /**
         * The name of the environment variable that contains the token.
         */
        env: string;
      }
    | {
        /**
         * The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets
         */
        googleCloudSecret: string;
      };
}

View file

@ -5162,6 +5162,115 @@ const schema = {
"purpose", "purpose",
"audience" "audience"
] ]
},
"AuthentikIdentityProviderConfig": {
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
}, },
"oneOf": [ "oneOf": [
@ -5808,6 +5917,115 @@ const schema = {
"purpose", "purpose",
"audience" "audience"
] ]
},
{
"type": "object",
"additionalProperties": false,
"properties": {
"provider": {
"const": "authentik"
},
"purpose": {
"const": "sso"
},
"clientId": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"clientSecret": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
},
"issuer": {
"anyOf": [
{
"type": "object",
"properties": {
"env": {
"type": "string",
"description": "The name of the environment variable that contains the token."
}
},
"required": [
"env"
],
"additionalProperties": false
},
{
"type": "object",
"properties": {
"googleCloudSecret": {
"type": "string",
"description": "The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets"
}
},
"required": [
"googleCloudSecret"
],
"additionalProperties": false
}
]
}
},
"required": [
"provider",
"purpose",
"clientId",
"clientSecret",
"issuer"
]
} }
] ]
} }

View file

@ -33,7 +33,8 @@ export type IdentityProviderConfig =
| OktaIdentityProviderConfig | OktaIdentityProviderConfig
| KeycloakIdentityProviderConfig | KeycloakIdentityProviderConfig
| MicrosoftEntraIDIdentityProviderConfig | MicrosoftEntraIDIdentityProviderConfig
| GCPIAPIdentityProviderConfig; | GCPIAPIdentityProviderConfig
| AuthentikIdentityProviderConfig;
export interface SourcebotConfig { export interface SourcebotConfig {
$schema?: string; $schema?: string;
@ -1401,3 +1402,46 @@ export interface GCPIAPIdentityProviderConfig {
googleCloudSecret: string; googleCloudSecret: string;
}; };
} }
/**
 * Identity provider configuration for Authentik SSO.
 *
 * Each credential (`clientId`, `clientSecret`, `issuer`) is supplied
 * indirectly: either via the name of an environment variable (`env`) or via a
 * Google Cloud Secret Manager resource name (`googleCloudSecret`).
 *
 * NOTE(review): this shape mirrors the generated JSON schema; presumably it
 * is emitted by a schema-to-TS generator — edit the schema, not this file.
 */
export interface AuthentikIdentityProviderConfig {
  provider: "authentik";
  purpose: "sso";
  clientId:
    | {
        /**
         * The name of the environment variable that contains the token.
         */
        env: string;
      }
    | {
        /**
         * The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets
         */
        googleCloudSecret: string;
      };
  clientSecret:
    | {
        /**
         * The name of the environment variable that contains the token.
         */
        env: string;
      }
    | {
        /**
         * The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets
         */
        googleCloudSecret: string;
      };
  issuer:
    | {
        /**
         * The name of the environment variable that contains the token.
         */
        env: string;
      }
    | {
        /**
         * The resource name of a Google Cloud secret. Must be in the format `projects/<project-id>/secrets/<secret-name>/versions/<version-id>`. See https://cloud.google.com/secret-manager/docs/creating-and-accessing-secrets
         */
        googleCloudSecret: string;
      };
}

View file

@ -7,7 +7,6 @@ export const env = createEnv({
client: { client: {
NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT: z.enum(SOURCEBOT_CLOUD_ENVIRONMENT).optional(), NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT: z.enum(SOURCEBOT_CLOUD_ENVIRONMENT).optional(),
NEXT_PUBLIC_SOURCEBOT_VERSION: z.string().default("unknown"), NEXT_PUBLIC_SOURCEBOT_VERSION: z.string().default("unknown"),
NEXT_PUBLIC_POSTHOG_PAPIK: z.string().optional(),
NEXT_PUBLIC_SENTRY_BACKEND_DSN: z.string().optional(), NEXT_PUBLIC_SENTRY_BACKEND_DSN: z.string().optional(),
NEXT_PUBLIC_SENTRY_ENVIRONMENT: z.string().optional(), NEXT_PUBLIC_SENTRY_ENVIRONMENT: z.string().optional(),
NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY: z.string().optional(), NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY: z.string().optional(),
@ -16,7 +15,6 @@ export const env = createEnv({
runtimeEnvStrict: { runtimeEnvStrict: {
NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT: process.env.NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT, NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT: process.env.NEXT_PUBLIC_SOURCEBOT_CLOUD_ENVIRONMENT,
NEXT_PUBLIC_SOURCEBOT_VERSION: process.env.NEXT_PUBLIC_SOURCEBOT_VERSION, NEXT_PUBLIC_SOURCEBOT_VERSION: process.env.NEXT_PUBLIC_SOURCEBOT_VERSION,
NEXT_PUBLIC_POSTHOG_PAPIK: process.env.NEXT_PUBLIC_POSTHOG_PAPIK,
NEXT_PUBLIC_SENTRY_BACKEND_DSN: process.env.NEXT_PUBLIC_SENTRY_BACKEND_DSN, NEXT_PUBLIC_SENTRY_BACKEND_DSN: process.env.NEXT_PUBLIC_SENTRY_BACKEND_DSN,
NEXT_PUBLIC_SENTRY_ENVIRONMENT: process.env.NEXT_PUBLIC_SENTRY_ENVIRONMENT, NEXT_PUBLIC_SENTRY_ENVIRONMENT: process.env.NEXT_PUBLIC_SENTRY_ENVIRONMENT,
NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY: process.env.NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY, NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY: process.env.NEXT_PUBLIC_LANGFUSE_PUBLIC_KEY,

View file

@ -120,6 +120,8 @@ export const env = createEnv({
CONFIG_MAX_REPOS_NO_TOKEN: numberSchema.default(Number.MAX_SAFE_INTEGER), CONFIG_MAX_REPOS_NO_TOKEN: numberSchema.default(Number.MAX_SAFE_INTEGER),
NODE_ENV: z.enum(["development", "test", "production"]), NODE_ENV: z.enum(["development", "test", "production"]),
SOURCEBOT_TELEMETRY_DISABLED: booleanSchema.default('false'), SOURCEBOT_TELEMETRY_DISABLED: booleanSchema.default('false'),
// @note: this is also declared in the Dockerfile.
POSTHOG_PAPIK: z.string().default("phc_lLPuFFi5LH6c94eFJcqvYVFwiJffVcV6HD8U4a1OnRW"),
// Database variables // Database variables
// Either DATABASE_URL or DATABASE_HOST, DATABASE_USERNAME, DATABASE_PASSWORD, and DATABASE_NAME must be set. // Either DATABASE_URL or DATABASE_HOST, DATABASE_USERNAME, DATABASE_PASSWORD, and DATABASE_NAME must be set.
@ -216,6 +218,12 @@ export const env = createEnv({
SOURCEBOT_LOG_LEVEL: z.enum(["info", "debug", "warn", "error"]).default("info"), SOURCEBOT_LOG_LEVEL: z.enum(["info", "debug", "warn", "error"]).default("info"),
SOURCEBOT_STRUCTURED_LOGGING_ENABLED: booleanSchema.default("false"), SOURCEBOT_STRUCTURED_LOGGING_ENABLED: booleanSchema.default("false"),
SOURCEBOT_STRUCTURED_LOGGING_FILE: z.string().optional(), SOURCEBOT_STRUCTURED_LOGGING_FILE: z.string().optional(),
// Configures the maximum number of search results to return by default.
DEFAULT_MAX_MATCH_COUNT: numberSchema.default(10_000),
// A comma separated list of glob patterns that should always be indexed regardless of their size.
ALWAYS_INDEX_FILE_PATTERNS: z.string().optional(),
}, },
runtimeEnv, runtimeEnv,
emptyStringAsUndefined: true, emptyStringAsUndefined: true,

View file

@ -1,3 +1,4 @@
# shadcn components # shadcn components
src/components/ src/components/
next-env.d.ts next-env.d.ts
src/proto/**

View file

@ -8,6 +8,7 @@
"start": "next start", "start": "next start",
"lint": "cross-env SKIP_ENV_VALIDATION=1 eslint .", "lint": "cross-env SKIP_ENV_VALIDATION=1 eslint .",
"test": "cross-env SKIP_ENV_VALIDATION=1 vitest", "test": "cross-env SKIP_ENV_VALIDATION=1 vitest",
"generate:protos": "proto-loader-gen-types --includeComments --longs=Number --enums=String --defaults --oneofs --grpcLib=@grpc/grpc-js --keepCase --includeDirs=../../vendor/zoekt/grpc/protos --outDir=src/proto zoekt/webserver/v1/webserver.proto zoekt/webserver/v1/query.proto",
"dev:emails": "email dev --dir ./src/emails", "dev:emails": "email dev --dir ./src/emails",
"stripe:listen": "stripe listen --forward-to http://localhost:3000/api/stripe" "stripe:listen": "stripe listen --forward-to http://localhost:3000/api/stripe"
}, },
@ -52,6 +53,8 @@
"@codemirror/state": "^6.4.1", "@codemirror/state": "^6.4.1",
"@codemirror/view": "^6.33.0", "@codemirror/view": "^6.33.0",
"@floating-ui/react": "^0.27.2", "@floating-ui/react": "^0.27.2",
"@grpc/grpc-js": "^1.14.1",
"@grpc/proto-loader": "^0.8.0",
"@hookform/resolvers": "^3.9.0", "@hookform/resolvers": "^3.9.0",
"@iconify/react": "^5.1.0", "@iconify/react": "^5.1.0",
"@iizukak/codemirror-lang-wgsl": "^0.3.0", "@iizukak/codemirror-lang-wgsl": "^0.3.0",
@ -91,6 +94,7 @@
"@shopify/lang-jsonc": "^1.0.0", "@shopify/lang-jsonc": "^1.0.0",
"@sourcebot/codemirror-lang-tcl": "^1.0.12", "@sourcebot/codemirror-lang-tcl": "^1.0.12",
"@sourcebot/db": "workspace:*", "@sourcebot/db": "workspace:*",
"@sourcebot/query-language": "workspace:*",
"@sourcebot/schemas": "workspace:*", "@sourcebot/schemas": "workspace:*",
"@sourcebot/shared": "workspace:*", "@sourcebot/shared": "workspace:*",
"@ssddanbrown/codemirror-lang-twig": "^1.0.0", "@ssddanbrown/codemirror-lang-twig": "^1.0.0",
@ -133,6 +137,7 @@
"embla-carousel-auto-scroll": "^8.3.0", "embla-carousel-auto-scroll": "^8.3.0",
"embla-carousel-react": "^8.3.0", "embla-carousel-react": "^8.3.0",
"escape-string-regexp": "^5.0.0", "escape-string-regexp": "^5.0.0",
"fast-deep-equal": "^3.1.3",
"fuse.js": "^7.0.0", "fuse.js": "^7.0.0",
"google-auth-library": "^10.1.0", "google-auth-library": "^10.1.0",
"graphql": "^16.9.0", "graphql": "^16.9.0",
@ -142,7 +147,7 @@
"langfuse-vercel": "^3.38.4", "langfuse-vercel": "^3.38.4",
"lucide-react": "^0.517.0", "lucide-react": "^0.517.0",
"micromatch": "^4.0.8", "micromatch": "^4.0.8",
"next": "15.5.0", "next": "^15.5.7",
"next-auth": "^5.0.0-beta.30", "next-auth": "^5.0.0-beta.30",
"next-navigation-guard": "^0.2.0", "next-navigation-guard": "^0.2.0",
"next-themes": "^0.3.0", "next-themes": "^0.3.0",
@ -151,11 +156,12 @@
"openai": "^4.98.0", "openai": "^4.98.0",
"parse-diff": "^0.11.1", "parse-diff": "^0.11.1",
"posthog-js": "^1.161.5", "posthog-js": "^1.161.5",
"posthog-node": "^5.15.0",
"pretty-bytes": "^6.1.1", "pretty-bytes": "^6.1.1",
"psl": "^1.15.0", "psl": "^1.15.0",
"react": "19.1.1", "react": "^19.2.1",
"react-device-detect": "^2.2.3", "react-device-detect": "^2.2.3",
"react-dom": "19.1.1", "react-dom": "^19.2.1",
"react-hook-form": "^7.53.0", "react-hook-form": "^7.53.0",
"react-hotkeys-hook": "^4.5.1", "react-hotkeys-hook": "^4.5.1",
"react-icons": "^5.3.0", "react-icons": "^5.3.0",
@ -191,8 +197,8 @@
"@types/node": "^20", "@types/node": "^20",
"@types/nodemailer": "^6.4.17", "@types/nodemailer": "^6.4.17",
"@types/psl": "^1.1.3", "@types/psl": "^1.1.3",
"@types/react": "19.1.10", "@types/react": "19.2.1",
"@types/react-dom": "19.1.7", "@types/react-dom": "19.2.1",
"@typescript-eslint/eslint-plugin": "^8.40.0", "@typescript-eslint/eslint-plugin": "^8.40.0",
"@typescript-eslint/parser": "^8.40.0", "@typescript-eslint/parser": "^8.40.0",
"cross-env": "^7.0.3", "cross-env": "^7.0.3",
@ -212,7 +218,7 @@
"vitest-mock-extended": "^3.1.0" "vitest-mock-extended": "^3.1.0"
}, },
"resolutions": { "resolutions": {
"@types/react": "19.1.10", "@types/react": "19.2.1",
"@types/react-dom": "19.1.7" "@types/react-dom": "19.2.1"
} }
} }

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" id="Layer_1" x="0" y="0" version="1.1" viewBox="-0.03 59.9 512.03 392.1"><style>.st0{fill:#fd4b2d}</style><path d="M279.9 141h17.9v51.2h-17.9zm46.6-2.2h17.9v40h-17.9zM65.3 197.3c-24 0-46 13.2-57.4 34.3h30.4c13.5-11.6 33-15 47.1 0h32.2c-12.6-17.1-31.4-34.3-52.3-34.3" class="st0"/><path d="M108.7 262.4C66.8 350-6.6 275.3 38.3 231.5H7.9C-15.9 273 17 329 65.3 327.8c37.4 0 68.2-55.5 68.2-65.3 0-4.3-6-17.6-16-31H85.4c10.7 9.7 20 23.7 23.3 30.9m1.1-2.6" class="st0"/><path d="M512 140.3v231.3c0 44.3-36.1 80.4-80.4 80.4h-34.1v-78.8h-163V452h-34.1c-44.4 0-80.4-36.1-80.4-80.4v-72.8h258.4v-139H253.6V238H119.9v-97.6c0-3.1.2-6.2.5-9.2.4-3.7 1.1-7.3 2-10.8.3-1.1.6-2.3 1-3.4.1-.3.2-.6.3-.8.2-.6.4-1.1.5-1.7.2-.5.4-1.1.6-1.7s.5-1.2.7-1.8.5-1.2.8-1.8c2-4.7 4.4-9.3 7.3-13.6l.1-.1c.7-1.1 1.5-2.1 2.3-3.2.7-.9 1.3-1.7 2-2.6.8-.9 1.6-1.9 2.4-2.8s1.6-1.8 2.4-2.6l.1-.1c.4-.5.9-.9 1.4-1.4 3-2.9 6.2-5.6 9.6-8 .9-.7 1.9-1.3 2.8-1.9 1.1-.7 2.2-1.4 3.3-2 2.1-1.2 4.2-2.4 6.5-3.4.7-.3 1.4-.7 2.1-1 3.1-1.3 6.2-2.5 9.4-3.4 1.2-.4 2.5-.7 3.7-1 .6-.2 1.2-.3 1.8-.4 3.6-.8 7.2-1.3 10.9-1.6l1.6-.1h.8c1.2-.1 2.4-.1 3.7-.1h231.3c1.2 0 2.5 0 3.7.1h.8l1.6.1c3.7.3 7.3.8 10.9 1.6.6.1 1.2.3 1.8.4 1.3.3 2.5.6 3.7 1 3.2.9 6.3 2.1 9.4 3.4.7.3 1.4.6 2.1 1 2.2 1 4.4 2.2 6.5 3.4 1.1.7 2.2 1.3 3.3 2 1 .6 1.9 1.3 2.8 1.9 3.9 2.8 7.6 6 11 9.4.8.8 1.7 1.7 2.4 2.6.8.9 1.6 1.9 2.4 2.8.7.8 1.3 1.7 2 2.6.8 1.1 1.5 2.1 2.3 3.2l.1.1c2.9 4.3 5.3 8.8 7.3 13.6.2.6.5 1.2.8 1.8.2.6.5 1.2.7 1.8.2.5.4 1.1.6 1.7s.4 1.1.5 1.7c.1.3.2.6.3.8.3 1.1.7 2.3 1 3.4.9 3.6 1.6 7.2 2 10.8 0 3.1.2 6.1.2 9.2" class="st0"/><path d="M498.3 95.5H133.5c14.9-22.2 40-35.6 66.7-35.6h231.3c26.9 0 51.9 13.4 66.8 35.6m13.2 35.6H120.4c1.4-12.8 6-25 13.1-35.6h364.8c7.2 10.6 11.7 22.9 13.2 35.6m.5 9.2v26.4H378.3v-6.9H253.6v6.9H119.9v-26.4c0-3.1.2-6.2.5-9.2h391.1c.3 3.1.5 6.1.5 9.2M119.9 166.7h133.7v35.6H119.9zm258.4 0H512v35.6H378.3zm-258.4 35.6h133.7v35.6H119.9zm258.4 0H512v35.6H378.3z" 
class="st0"/></svg>

After

Width:  |  Height:  |  Size: 2 KiB

View file

@ -38,8 +38,8 @@ const auditService = getAuditService();
/** /**
* "Service Error Wrapper". * "Service Error Wrapper".
* *
* Captures any thrown exceptions and converts them to a unexpected * Captures any thrown exceptions, logs them to the console and Sentry,
* service error. Also logs them with Sentry. * and returns a generic unexpected service error.
*/ */
export const sew = async <T>(fn: () => Promise<T>): Promise<T | ServiceError> => { export const sew = async <T>(fn: () => Promise<T>): Promise<T | ServiceError> => {
try { try {
@ -48,8 +48,8 @@ export const sew = async <T>(fn: () => Promise<T>): Promise<T | ServiceError> =>
Sentry.captureException(e); Sentry.captureException(e);
logger.error(e); logger.error(e);
if (e instanceof Error) { if (e instanceof ServiceErrorException) {
return unexpectedError(e.message); return e.serviceError;
} }
return unexpectedError(`An unexpected error occurred. Please try again later.`); return unexpectedError(`An unexpected error occurred. Please try again later.`);

View file

@ -1,10 +1,10 @@
import { getRepoInfoByName } from "@/actions"; import { getRepoInfoByName } from "@/actions";
import { PathHeader } from "@/app/[domain]/components/pathHeader"; import { PathHeader } from "@/app/[domain]/components/pathHeader";
import { Separator } from "@/components/ui/separator"; import { Separator } from "@/components/ui/separator";
import { getFileSource } from "@/features/search/fileSourceApi";
import { cn, getCodeHostInfoForRepo, isServiceError } from "@/lib/utils"; import { cn, getCodeHostInfoForRepo, isServiceError } from "@/lib/utils";
import Image from "next/image"; import Image from "next/image";
import { PureCodePreviewPanel } from "./pureCodePreviewPanel"; import { PureCodePreviewPanel } from "./pureCodePreviewPanel";
import { getFileSource } from "@/features/search/fileSourceApi";
interface CodePreviewPanelProps { interface CodePreviewPanelProps {
path: string; path: string;
@ -22,8 +22,12 @@ export const CodePreviewPanel = async ({ path, repoName, revisionName }: CodePre
getRepoInfoByName(repoName), getRepoInfoByName(repoName),
]); ]);
if (isServiceError(fileSourceResponse) || isServiceError(repoInfoResponse)) { if (isServiceError(fileSourceResponse)) {
return <div>Error loading file source</div> return <div>Error loading file source: {fileSourceResponse.message}</div>
}
if (isServiceError(repoInfoResponse)) {
return <div>Error loading repo info: {repoInfoResponse.message}</div>
} }
const codeHostInfo = getCodeHostInfoForRepo({ const codeHostInfo = getCodeHostInfoForRepo({

View file

@ -3,7 +3,6 @@
import { ScrollArea } from "@/components/ui/scroll-area"; import { ScrollArea } from "@/components/ui/scroll-area";
import { SymbolHoverPopup } from "@/ee/features/codeNav/components/symbolHoverPopup"; import { SymbolHoverPopup } from "@/ee/features/codeNav/components/symbolHoverPopup";
import { symbolHoverTargetsExtension } from "@/ee/features/codeNav/components/symbolHoverPopup/symbolHoverTargetsExtension"; import { symbolHoverTargetsExtension } from "@/ee/features/codeNav/components/symbolHoverPopup/symbolHoverTargetsExtension";
import { SymbolDefinition } from "@/ee/features/codeNav/components/symbolHoverPopup/useHoveredOverSymbolInfo";
import { useHasEntitlement } from "@/features/entitlements/useHasEntitlement"; import { useHasEntitlement } from "@/features/entitlements/useHasEntitlement";
import { useCodeMirrorLanguageExtension } from "@/hooks/useCodeMirrorLanguageExtension"; import { useCodeMirrorLanguageExtension } from "@/hooks/useCodeMirrorLanguageExtension";
import { useCodeMirrorTheme } from "@/hooks/useCodeMirrorTheme"; import { useCodeMirrorTheme } from "@/hooks/useCodeMirrorTheme";
@ -11,15 +10,10 @@ import { useKeymapExtension } from "@/hooks/useKeymapExtension";
import { useNonEmptyQueryParam } from "@/hooks/useNonEmptyQueryParam"; import { useNonEmptyQueryParam } from "@/hooks/useNonEmptyQueryParam";
import { search } from "@codemirror/search"; import { search } from "@codemirror/search";
import CodeMirror, { EditorSelection, EditorView, ReactCodeMirrorRef, SelectionRange, ViewUpdate } from "@uiw/react-codemirror"; import CodeMirror, { EditorSelection, EditorView, ReactCodeMirrorRef, SelectionRange, ViewUpdate } from "@uiw/react-codemirror";
import { useCallback, useEffect, useMemo, useState } from "react"; import { useEffect, useMemo, useState } from "react";
import { EditorContextMenu } from "../../../components/editorContextMenu"; import { EditorContextMenu } from "../../../components/editorContextMenu";
import { useBrowseNavigation } from "../../hooks/useBrowseNavigation";
import { BrowseHighlightRange, HIGHLIGHT_RANGE_QUERY_PARAM } from "../../hooks/utils"; import { BrowseHighlightRange, HIGHLIGHT_RANGE_QUERY_PARAM } from "../../hooks/utils";
import { useBrowseState } from "../../hooks/useBrowseState";
import { rangeHighlightingExtension } from "./rangeHighlightingExtension"; import { rangeHighlightingExtension } from "./rangeHighlightingExtension";
import useCaptureEvent from "@/hooks/useCaptureEvent";
import { createAuditAction } from "@/ee/features/audit/actions";
import { useDomain } from "@/hooks/useDomain";
interface PureCodePreviewPanelProps { interface PureCodePreviewPanelProps {
path: string; path: string;
@ -41,10 +35,6 @@ export const PureCodePreviewPanel = ({
const [currentSelection, setCurrentSelection] = useState<SelectionRange>(); const [currentSelection, setCurrentSelection] = useState<SelectionRange>();
const keymapExtension = useKeymapExtension(editorRef?.view); const keymapExtension = useKeymapExtension(editorRef?.view);
const hasCodeNavEntitlement = useHasEntitlement("code-nav"); const hasCodeNavEntitlement = useHasEntitlement("code-nav");
const { updateBrowseState } = useBrowseState();
const { navigateToPath } = useBrowseNavigation();
const domain = useDomain();
const captureEvent = useCaptureEvent();
const highlightRangeQuery = useNonEmptyQueryParam(HIGHLIGHT_RANGE_QUERY_PARAM); const highlightRangeQuery = useNonEmptyQueryParam(HIGHLIGHT_RANGE_QUERY_PARAM);
const highlightRange = useMemo((): BrowseHighlightRange | undefined => { const highlightRange = useMemo((): BrowseHighlightRange | undefined => {
@ -90,7 +80,6 @@ export const PureCodePreviewPanel = ({
} }
} }
} }
}, [highlightRangeQuery]); }, [highlightRangeQuery]);
const extensions = useMemo(() => { const extensions = useMemo(() => {
@ -118,90 +107,31 @@ export const PureCodePreviewPanel = ({
// Scroll the highlighted range into view. // Scroll the highlighted range into view.
useEffect(() => { useEffect(() => {
if (!highlightRange || !editorRef || !editorRef.state) { if (!highlightRange || !editorRef || !editorRef.state || !editorRef.view) {
return; return;
} }
const doc = editorRef.state.doc; const doc = editorRef.state.doc;
const { start, end } = highlightRange; const { start, end } = highlightRange;
const selection = EditorSelection.range(
doc.line(start.lineNumber).from, const from = doc.line(start.lineNumber).from;
doc.line(end.lineNumber).from, const to = doc.line(end.lineNumber).to;
); const selection = EditorSelection.range(from, to);
// When the selection is in view, we don't want to perform any scrolling
// as it could be jarring for the user. If it is not in view, scroll to the
// center of the viewport.
const viewport = editorRef.view.viewport;
const isInView = from >= viewport.from && to <= viewport.to;
const scrollStrategy = isInView ? "nearest" : "center";
editorRef.view?.dispatch({ editorRef.view?.dispatch({
effects: [ effects: [
EditorView.scrollIntoView(selection, { y: "center" }), EditorView.scrollIntoView(selection, { y: scrollStrategy }),
] ]
}); });
}, [editorRef, highlightRange]); }, [editorRef, highlightRange]);
const onFindReferences = useCallback((symbolName: string) => {
captureEvent('wa_find_references_pressed', {
source: 'browse',
});
createAuditAction({
action: "user.performed_find_references",
metadata: {
message: symbolName,
},
}, domain)
updateBrowseState({
selectedSymbolInfo: {
repoName,
symbolName,
revisionName,
language,
},
isBottomPanelCollapsed: false,
activeExploreMenuTab: "references",
})
}, [captureEvent, updateBrowseState, repoName, revisionName, language, domain]);
// If we resolve multiple matches, instead of navigating to the first match, we should
// instead popup the bottom sheet with the list of matches.
const onGotoDefinition = useCallback((symbolName: string, symbolDefinitions: SymbolDefinition[]) => {
captureEvent('wa_goto_definition_pressed', {
source: 'browse',
});
createAuditAction({
action: "user.performed_goto_definition",
metadata: {
message: symbolName,
},
}, domain)
if (symbolDefinitions.length === 0) {
return;
}
if (symbolDefinitions.length === 1) {
const symbolDefinition = symbolDefinitions[0];
const { fileName, repoName } = symbolDefinition;
navigateToPath({
repoName,
revisionName,
path: fileName,
pathType: 'blob',
highlightRange: symbolDefinition.range,
})
} else {
updateBrowseState({
selectedSymbolInfo: {
symbolName,
repoName,
revisionName,
language,
},
activeExploreMenuTab: "definitions",
isBottomPanelCollapsed: false,
})
}
}, [captureEvent, navigateToPath, revisionName, updateBrowseState, repoName, language, domain]);
const theme = useCodeMirrorTheme(); const theme = useCodeMirrorTheme();
return ( return (
@ -225,11 +155,12 @@ export const PureCodePreviewPanel = ({
)} )}
{editorRef && hasCodeNavEntitlement && ( {editorRef && hasCodeNavEntitlement && (
<SymbolHoverPopup <SymbolHoverPopup
source="preview"
editorRef={editorRef} editorRef={editorRef}
revisionName={revisionName} revisionName={revisionName}
language={language} language={language}
onFindReferences={onFindReferences} fileName={path}
onGotoDefinition={onGotoDefinition} repoName={repoName}
/> />
)} )}
</CodeMirror> </CodeMirror>

View file

@ -1,12 +1,12 @@
'use client'; 'use client';
import { useRef } from "react"; import { useRef } from "react";
import { FileTreeItem } from "@/features/fileTree/actions";
import { FileTreeItemComponent } from "@/features/fileTree/components/fileTreeItemComponent"; import { FileTreeItemComponent } from "@/features/fileTree/components/fileTreeItemComponent";
import { getBrowsePath } from "../../hooks/utils"; import { getBrowsePath } from "../../hooks/utils";
import { ScrollArea } from "@/components/ui/scroll-area"; import { ScrollArea } from "@/components/ui/scroll-area";
import { useBrowseParams } from "../../hooks/useBrowseParams"; import { useBrowseParams } from "../../hooks/useBrowseParams";
import { useDomain } from "@/hooks/useDomain"; import { useDomain } from "@/hooks/useDomain";
import { FileTreeItem } from "@/features/fileTree/types";
interface PureTreePreviewPanelProps { interface PureTreePreviewPanelProps {
items: FileTreeItem[]; items: FileTreeItem[];

View file

@ -2,7 +2,7 @@
import { Separator } from "@/components/ui/separator"; import { Separator } from "@/components/ui/separator";
import { getRepoInfoByName } from "@/actions"; import { getRepoInfoByName } from "@/actions";
import { PathHeader } from "@/app/[domain]/components/pathHeader"; import { PathHeader } from "@/app/[domain]/components/pathHeader";
import { getFolderContents } from "@/features/fileTree/actions"; import { getFolderContents } from "@/features/fileTree/api";
import { isServiceError } from "@/lib/utils"; import { isServiceError } from "@/lib/utils";
import { PureTreePreviewPanel } from "./pureTreePreviewPanel"; import { PureTreePreviewPanel } from "./pureTreePreviewPanel";

View file

@ -5,7 +5,6 @@ import { useState, useRef, useMemo, useEffect, useCallback } from "react";
import { useHotkeys } from "react-hotkeys-hook"; import { useHotkeys } from "react-hotkeys-hook";
import { useQuery } from "@tanstack/react-query"; import { useQuery } from "@tanstack/react-query";
import { unwrapServiceError } from "@/lib/utils"; import { unwrapServiceError } from "@/lib/utils";
import { FileTreeItem, getFiles } from "@/features/fileTree/actions";
import { Dialog, DialogContent, DialogDescription, DialogTitle } from "@/components/ui/dialog"; import { Dialog, DialogContent, DialogDescription, DialogTitle } from "@/components/ui/dialog";
import { useBrowseNavigation } from "../hooks/useBrowseNavigation"; import { useBrowseNavigation } from "../hooks/useBrowseNavigation";
import { useBrowseState } from "../hooks/useBrowseState"; import { useBrowseState } from "../hooks/useBrowseState";
@ -13,6 +12,8 @@ import { useBrowseParams } from "../hooks/useBrowseParams";
import { FileTreeItemIcon } from "@/features/fileTree/components/fileTreeItemIcon"; import { FileTreeItemIcon } from "@/features/fileTree/components/fileTreeItemIcon";
import { useLocalStorage } from "usehooks-ts"; import { useLocalStorage } from "usehooks-ts";
import { Skeleton } from "@/components/ui/skeleton"; import { Skeleton } from "@/components/ui/skeleton";
import { FileTreeItem } from "@/features/fileTree/types";
import { getFiles } from "@/app/api/(client)/client";
const MAX_RESULTS = 100; const MAX_RESULTS = 100;

View file

@ -10,6 +10,7 @@ import { useBrowseParams } from "./hooks/useBrowseParams";
import { FileSearchCommandDialog } from "./components/fileSearchCommandDialog"; import { FileSearchCommandDialog } from "./components/fileSearchCommandDialog";
import { useDomain } from "@/hooks/useDomain"; import { useDomain } from "@/hooks/useDomain";
import { SearchBar } from "../components/searchBar"; import { SearchBar } from "../components/searchBar";
import escapeStringRegexp from "escape-string-regexp";
interface LayoutProps { interface LayoutProps {
children: React.ReactNode; children: React.ReactNode;
@ -29,7 +30,9 @@ export default function Layout({
> >
<SearchBar <SearchBar
size="sm" size="sm"
defaultQuery={`repo:${repoName}${revisionName ? ` rev:${revisionName}` : ''} `} defaults={{
query: `repo:^${escapeStringRegexp(repoName)}$${revisionName ? ` rev:${revisionName}` : ''} `,
}}
className="w-full" className="w-full"
/> />
</TopBar> </TopBar>

View file

@ -24,9 +24,9 @@ export default async function Page(props: PageProps) {
const languageModels = await getConfiguredLanguageModelsInfo(); const languageModels = await getConfiguredLanguageModelsInfo();
const repos = await getRepos(); const repos = await getRepos();
const searchContexts = await getSearchContexts(params.domain); const searchContexts = await getSearchContexts(params.domain);
const chatInfo = await getChatInfo({ chatId: params.id }, params.domain); const chatInfo = await getChatInfo({ chatId: params.id });
const session = await auth(); const session = await auth();
const chatHistory = session ? await getUserChatHistory(params.domain) : []; const chatHistory = session ? await getUserChatHistory() : [];
if (isServiceError(chatHistory)) { if (isServiceError(chatHistory)) {
throw new ServiceErrorException(chatHistory); throw new ServiceErrorException(chatHistory);

View file

@ -4,7 +4,6 @@ import { useToast } from "@/components/hooks/use-toast";
import { Badge } from "@/components/ui/badge"; import { Badge } from "@/components/ui/badge";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import { updateChatName } from "@/features/chat/actions"; import { updateChatName } from "@/features/chat/actions";
import { useDomain } from "@/hooks/useDomain";
import { isServiceError } from "@/lib/utils"; import { isServiceError } from "@/lib/utils";
import { GlobeIcon } from "@radix-ui/react-icons"; import { GlobeIcon } from "@radix-ui/react-icons";
import { ChatVisibility } from "@sourcebot/db"; import { ChatVisibility } from "@sourcebot/db";
@ -23,7 +22,6 @@ interface ChatNameProps {
export const ChatName = ({ name, visibility, id, isReadonly }: ChatNameProps) => { export const ChatName = ({ name, visibility, id, isReadonly }: ChatNameProps) => {
const [isRenameDialogOpen, setIsRenameDialogOpen] = useState(false); const [isRenameDialogOpen, setIsRenameDialogOpen] = useState(false);
const { toast } = useToast(); const { toast } = useToast();
const domain = useDomain();
const router = useRouter(); const router = useRouter();
const onRenameChat = useCallback(async (name: string) => { const onRenameChat = useCallback(async (name: string) => {
@ -31,7 +29,7 @@ export const ChatName = ({ name, visibility, id, isReadonly }: ChatNameProps) =>
const response = await updateChatName({ const response = await updateChatName({
chatId: id, chatId: id,
name: name, name: name,
}, domain); });
if (isServiceError(response)) { if (isServiceError(response)) {
toast({ toast({
@ -43,7 +41,7 @@ export const ChatName = ({ name, visibility, id, isReadonly }: ChatNameProps) =>
}); });
router.refresh(); router.refresh();
} }
}, [id, domain, toast, router]); }, [id, toast, router]);
return ( return (
<> <>

View file

@ -9,7 +9,6 @@ import { ScrollArea } from "@/components/ui/scroll-area";
import { Separator } from "@/components/ui/separator"; import { Separator } from "@/components/ui/separator";
import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"; import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip";
import { deleteChat, updateChatName } from "@/features/chat/actions"; import { deleteChat, updateChatName } from "@/features/chat/actions";
import { useDomain } from "@/hooks/useDomain";
import { cn, isServiceError } from "@/lib/utils"; import { cn, isServiceError } from "@/lib/utils";
import { CirclePlusIcon, EllipsisIcon, PencilIcon, TrashIcon } from "lucide-react"; import { CirclePlusIcon, EllipsisIcon, PencilIcon, TrashIcon } from "lucide-react";
import { useRouter } from "next/navigation"; import { useRouter } from "next/navigation";
@ -23,6 +22,7 @@ import { useChatId } from "../useChatId";
import { RenameChatDialog } from "./renameChatDialog"; import { RenameChatDialog } from "./renameChatDialog";
import { DeleteChatDialog } from "./deleteChatDialog"; import { DeleteChatDialog } from "./deleteChatDialog";
import Link from "next/link"; import Link from "next/link";
import { SINGLE_TENANT_ORG_DOMAIN } from "@/lib/constants";
interface ChatSidePanelProps { interface ChatSidePanelProps {
order: number; order: number;
@ -41,7 +41,6 @@ export const ChatSidePanel = ({
isAuthenticated, isAuthenticated,
isCollapsedInitially, isCollapsedInitially,
}: ChatSidePanelProps) => { }: ChatSidePanelProps) => {
const domain = useDomain();
const [isCollapsed, setIsCollapsed] = useState(isCollapsedInitially); const [isCollapsed, setIsCollapsed] = useState(isCollapsedInitially);
const sidePanelRef = useRef<ImperativePanelHandle>(null); const sidePanelRef = useRef<ImperativePanelHandle>(null);
const router = useRouter(); const router = useRouter();
@ -72,7 +71,7 @@ export const ChatSidePanel = ({
const response = await updateChatName({ const response = await updateChatName({
chatId, chatId,
name: name, name: name,
}, domain); });
if (isServiceError(response)) { if (isServiceError(response)) {
toast({ toast({
@ -84,14 +83,14 @@ export const ChatSidePanel = ({
}); });
router.refresh(); router.refresh();
} }
}, [router, toast, domain]); }, [router, toast]);
const onDeleteChat = useCallback(async (chatIdToDelete: string) => { const onDeleteChat = useCallback(async (chatIdToDelete: string) => {
if (!chatIdToDelete) { if (!chatIdToDelete) {
return; return;
} }
const response = await deleteChat({ chatId: chatIdToDelete }, domain); const response = await deleteChat({ chatId: chatIdToDelete });
if (isServiceError(response)) { if (isServiceError(response)) {
toast({ toast({
@ -104,12 +103,12 @@ export const ChatSidePanel = ({
// If we just deleted the current chat, navigate to new chat // If we just deleted the current chat, navigate to new chat
if (chatIdToDelete === chatId) { if (chatIdToDelete === chatId) {
router.push(`/${domain}/chat`); router.push(`/${SINGLE_TENANT_ORG_DOMAIN}/chat`);
} }
router.refresh(); router.refresh();
} }
}, [chatId, router, toast, domain]); }, [chatId, router, toast]);
return ( return (
<> <>
@ -131,7 +130,7 @@ export const ChatSidePanel = ({
size="sm" size="sm"
className="w-full" className="w-full"
onClick={() => { onClick={() => {
router.push(`/${domain}/chat`); router.push(`/${SINGLE_TENANT_ORG_DOMAIN}/chat`);
}} }}
> >
<CirclePlusIcon className="w-4 h-4 mr-1" /> <CirclePlusIcon className="w-4 h-4 mr-1" />
@ -145,7 +144,7 @@ export const ChatSidePanel = ({
<div className="flex flex-col"> <div className="flex flex-col">
<p className="text-sm text-muted-foreground mb-4"> <p className="text-sm text-muted-foreground mb-4">
<Link <Link
href={`/login?callbackUrl=${encodeURIComponent(`/${domain}/chat`)}`} href={`/login?callbackUrl=${encodeURIComponent(`/${SINGLE_TENANT_ORG_DOMAIN}/chat`)}`}
className="text-sm text-link hover:underline cursor-pointer" className="text-sm text-link hover:underline cursor-pointer"
> >
Sign in Sign in
@ -163,7 +162,7 @@ export const ChatSidePanel = ({
chat.id === chatId && "bg-muted" chat.id === chatId && "bg-muted"
)} )}
onClick={() => { onClick={() => {
router.push(`/${domain}/chat/${chat.id}`); router.push(`/${SINGLE_TENANT_ORG_DOMAIN}/chat/${chat.id}`);
}} }}
> >
<span className="text-sm truncate">{chat.name ?? 'Untitled chat'}</span> <span className="text-sm truncate">{chat.name ?? 'Untitled chat'}</span>

View file

@ -6,7 +6,7 @@ import { memo, useEffect, useMemo, useState } from 'react'
import { useCodeMirrorHighlighter } from '@/hooks/useCodeMirrorHighlighter' import { useCodeMirrorHighlighter } from '@/hooks/useCodeMirrorHighlighter'
import tailwind from '@/tailwind' import tailwind from '@/tailwind'
import { measure } from '@/lib/utils' import { measure } from '@/lib/utils'
import { SourceRange } from '@/features/search/types' import { SourceRange } from '@/features/search'
// Define a plain text language // Define a plain text language
const plainTextLanguage = StreamLanguage.define({ const plainTextLanguage = StreamLanguage.define({

View file

@ -1,4 +1,4 @@
import { getConnectionStats, getRepos, getReposStats } from "@/actions"; import { getConnectionStats, getCurrentUserRole, getOrgAccountRequests, getRepos, getReposStats } from "@/actions";
import { SourcebotLogo } from "@/app/components/sourcebotLogo"; import { SourcebotLogo } from "@/app/components/sourcebotLogo";
import { auth } from "@/auth"; import { auth } from "@/auth";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
@ -10,7 +10,7 @@ import { env } from "@sourcebot/shared";
import { ServiceErrorException } from "@/lib/serviceError"; import { ServiceErrorException } from "@/lib/serviceError";
import { isServiceError } from "@/lib/utils"; import { isServiceError } from "@/lib/utils";
import { DiscordLogoIcon, GitHubLogoIcon } from "@radix-ui/react-icons"; import { DiscordLogoIcon, GitHubLogoIcon } from "@radix-ui/react-icons";
import { RepoIndexingJobStatus, RepoIndexingJobType } from "@sourcebot/db"; import { OrgRole, RepoIndexingJobStatus, RepoIndexingJobType } from "@sourcebot/db";
import Link from "next/link"; import Link from "next/link";
import { redirect } from "next/navigation"; import { redirect } from "next/navigation";
import { OrgSelector } from "../orgSelector"; import { OrgSelector } from "../orgSelector";
@ -20,7 +20,7 @@ import { NavigationItems } from "./navigationItems";
import { ProgressIndicator } from "./progressIndicator"; import { ProgressIndicator } from "./progressIndicator";
import { TrialIndicator } from "./trialIndicator"; import { TrialIndicator } from "./trialIndicator";
const SOURCEBOT_DISCORD_URL = "https://discord.gg/6Fhp27x7Pb"; const SOURCEBOT_DISCORD_URL = "https://discord.gg/HDScTs3ptP";
const SOURCEBOT_GITHUB_URL = "https://github.com/sourcebot-dev/sourcebot"; const SOURCEBOT_GITHUB_URL = "https://github.com/sourcebot-dev/sourcebot";
interface NavigationMenuProps { interface NavigationMenuProps {
@ -39,11 +39,32 @@ export const NavigationMenu = async ({
throw new ServiceErrorException(repoStats); throw new ServiceErrorException(repoStats);
} }
const connectionStats = isAuthenticated ? await getConnectionStats() : null; const role = isAuthenticated ? await getCurrentUserRole(domain) : null;
if (isServiceError(connectionStats)) { if (isServiceError(role)) {
throw new ServiceErrorException(connectionStats); throw new ServiceErrorException(role);
} }
const stats = await (async () => {
if (!isAuthenticated || role !== OrgRole.OWNER) {
return null;
}
const joinRequests = await getOrgAccountRequests(domain);
if (isServiceError(joinRequests)) {
throw new ServiceErrorException(joinRequests);
}
const connectionStats = await getConnectionStats();
if (isServiceError(connectionStats)) {
throw new ServiceErrorException(connectionStats);
}
return {
numJoinRequests: joinRequests.length,
connectionStats,
};
})();
const sampleRepos = await getRepos({ const sampleRepos = await getRepos({
where: { where: {
jobs: { jobs: {
@ -100,9 +121,10 @@ export const NavigationMenu = async ({
numberOfRepos={numberOfRepos} numberOfRepos={numberOfRepos}
isReposButtonNotificationDotVisible={numberOfReposWithFirstTimeIndexingJobsInProgress > 0} isReposButtonNotificationDotVisible={numberOfReposWithFirstTimeIndexingJobsInProgress > 0}
isSettingsButtonNotificationDotVisible={ isSettingsButtonNotificationDotVisible={
connectionStats ? stats ? (
connectionStats.numberOfConnectionsWithFirstTimeSyncJobsInProgress > 0 : stats.connectionStats.numberOfConnectionsWithFirstTimeSyncJobsInProgress > 0 ||
false stats.numJoinRequests > 0
) : false
} }
isAuthenticated={isAuthenticated} isAuthenticated={isAuthenticated}
/> />

View file

@ -233,7 +233,7 @@ export const PathHeader = ({
}} }}
> >
<span className="mr-0.5">@</span> <span className="mr-0.5">@</span>
{`${branchDisplayName}`} {`${branchDisplayName.replace(/^refs\/(heads|tags)\//, '')}`}
</p> </p>
)} )}
<span>·</span> <span>·</span>

View file

@ -16,57 +16,53 @@ export enum SearchPrefix {
sym = "sym:", sym = "sym:",
content = "content:", content = "content:",
archived = "archived:", archived = "archived:",
case = "case:",
fork = "fork:", fork = "fork:",
public = "public:", visibility = "visibility:",
context = "context:", context = "context:",
} }
export const publicModeSuggestions: Suggestion[] = [ export const visibilityModeSuggestions: Suggestion[] = [
{ {
value: "yes", value: "public",
description: "Only include results from public repositories." description: "Only include results from public repositories."
}, },
{ {
value: "no", value: "private",
description: "Only include results from private repositories." description: "Only include results from private repositories."
}, },
{
value: "any",
description: "Include results from both public and private repositories (default)."
},
]; ];
export const forkModeSuggestions: Suggestion[] = [ export const forkModeSuggestions: Suggestion[] = [
{ {
value: "yes", value: "yes",
description: "Include results from forked repositories (default)."
},
{
value: "no",
description: "Exclude results from forked repositories."
},
{
value: "only",
description: "Only include results from forked repositories." description: "Only include results from forked repositories."
}, }
{
value: "no",
description: "Only include results from non-forked repositories."
},
];
export const caseModeSuggestions: Suggestion[] = [
{
value: "auto",
description: "Search patterns are case-insensitive if all characters are lowercase, and case sensitive otherwise (default)."
},
{
value: "yes",
description: "Case sensitive search."
},
{
value: "no",
description: "Case insensitive search."
},
]; ];
export const archivedModeSuggestions: Suggestion[] = [ export const archivedModeSuggestions: Suggestion[] = [
{ {
value: "yes", value: "yes",
description: "Only include results in archived repositories." description: "Include results from archived repositories (default)."
}, },
{ {
value: "no", value: "no",
description: "Only include results in non-archived repositories." description: "Exclude results from archived repositories."
}, },
{
value: "only",
description: "Only include results from archived repositories."
}
]; ];

View file

@ -42,14 +42,18 @@ import { Separator } from "@/components/ui/separator";
import { Tooltip, TooltipTrigger, TooltipContent } from "@/components/ui/tooltip"; import { Tooltip, TooltipTrigger, TooltipContent } from "@/components/ui/tooltip";
import { Toggle } from "@/components/ui/toggle"; import { Toggle } from "@/components/ui/toggle";
import { useDomain } from "@/hooks/useDomain"; import { useDomain } from "@/hooks/useDomain";
import { KeyboardShortcutHint } from "@/app/components/keyboardShortcutHint";
import { createAuditAction } from "@/ee/features/audit/actions"; import { createAuditAction } from "@/ee/features/audit/actions";
import tailwind from "@/tailwind"; import tailwind from "@/tailwind";
import { CaseSensitiveIcon, RegexIcon } from "lucide-react";
interface SearchBarProps { interface SearchBarProps {
className?: string; className?: string;
size?: "default" | "sm"; size?: "default" | "sm";
defaultQuery?: string; defaults?: {
isRegexEnabled?: boolean;
isCaseSensitivityEnabled?: boolean;
query?: string;
}
autoFocus?: boolean; autoFocus?: boolean;
} }
@ -91,8 +95,12 @@ const searchBarContainerVariants = cva(
export const SearchBar = ({ export const SearchBar = ({
className, className,
size, size,
defaultQuery,
autoFocus, autoFocus,
defaults: {
isRegexEnabled: defaultIsRegexEnabled = false,
isCaseSensitivityEnabled: defaultIsCaseSensitivityEnabled = false,
query: defaultQuery = "",
} = {}
}: SearchBarProps) => { }: SearchBarProps) => {
const router = useRouter(); const router = useRouter();
const domain = useDomain(); const domain = useDomain();
@ -102,11 +110,13 @@ export const SearchBar = ({
const [isSuggestionsEnabled, setIsSuggestionsEnabled] = useState(false); const [isSuggestionsEnabled, setIsSuggestionsEnabled] = useState(false);
const [isSuggestionsBoxFocused, setIsSuggestionsBoxFocused] = useState(false); const [isSuggestionsBoxFocused, setIsSuggestionsBoxFocused] = useState(false);
const [isHistorySearchEnabled, setIsHistorySearchEnabled] = useState(false); const [isHistorySearchEnabled, setIsHistorySearchEnabled] = useState(false);
const [isRegexEnabled, setIsRegexEnabled] = useState(defaultIsRegexEnabled);
const [isCaseSensitivityEnabled, setIsCaseSensitivityEnabled] = useState(defaultIsCaseSensitivityEnabled);
const focusEditor = useCallback(() => editorRef.current?.view?.focus(), []); const focusEditor = useCallback(() => editorRef.current?.view?.focus(), []);
const focusSuggestionsBox = useCallback(() => suggestionBoxRef.current?.focus(), []); const focusSuggestionsBox = useCallback(() => suggestionBoxRef.current?.focus(), []);
const [_query, setQuery] = useState(defaultQuery ?? ""); const [_query, setQuery] = useState(defaultQuery);
const query = useMemo(() => { const query = useMemo(() => {
// Replace any newlines with spaces to handle // Replace any newlines with spaces to handle
// copy & pasting text with newlines. // copy & pasting text with newlines.
@ -211,13 +221,15 @@ export const SearchBar = ({
metadata: { metadata: {
message: query, message: query,
}, },
}, domain) })
const url = createPathWithQueryParams(`/${domain}/search`, const url = createPathWithQueryParams(`/${domain}/search`,
[SearchQueryParams.query, query], [SearchQueryParams.query, query],
[SearchQueryParams.isRegexEnabled, isRegexEnabled ? "true" : null],
[SearchQueryParams.isCaseSensitivityEnabled, isCaseSensitivityEnabled ? "true" : null],
); );
router.push(url); router.push(url);
}, [domain, router]); }, [domain, router, isRegexEnabled, isCaseSensitivityEnabled]);
return ( return (
<div <div
@ -275,18 +287,40 @@ export const SearchBar = ({
indentWithTab={false} indentWithTab={false}
autoFocus={autoFocus ?? false} autoFocus={autoFocus ?? false}
/> />
<Tooltip <div className="flex flex-row items-center gap-1 ml-1">
delayDuration={100} <Tooltip>
> <TooltipTrigger asChild>
<TooltipTrigger asChild> <span>
<div> <Toggle
<KeyboardShortcutHint shortcut="/" /> className="h-7 w-7 min-w-7 p-0 cursor-pointer"
</div> pressed={isCaseSensitivityEnabled}
</TooltipTrigger> onPressedChange={setIsCaseSensitivityEnabled}
<TooltipContent side="bottom" className="flex flex-row items-center gap-2"> >
Focus search bar <CaseSensitiveIcon className="w-4 h-4" />
</TooltipContent> </Toggle>
</Tooltip> </span>
</TooltipTrigger>
<TooltipContent side="bottom" className="flex flex-row items-center gap-2">
{isCaseSensitivityEnabled ? "Disable" : "Enable"} case sensitivity
</TooltipContent>
</Tooltip>
<Tooltip>
<TooltipTrigger asChild>
<span>
<Toggle
className="h-7 w-7 min-w-7 p-0 cursor-pointer"
pressed={isRegexEnabled}
onPressedChange={setIsRegexEnabled}
>
<RegexIcon className="w-4 h-4" />
</Toggle>
</span>
</TooltipTrigger>
<TooltipContent side="bottom" className="flex flex-row items-center gap-2">
{isRegexEnabled ? "Disable" : "Enable"} regular expressions
</TooltipContent>
</Tooltip>
</div>
<SearchSuggestionsBox <SearchSuggestionsBox
ref={suggestionBoxRef} ref={suggestionBoxRef}
query={query} query={query}

View file

@ -7,9 +7,8 @@ import Fuse from "fuse.js";
import { forwardRef, Ref, useEffect, useMemo, useState } from "react"; import { forwardRef, Ref, useEffect, useMemo, useState } from "react";
import { import {
archivedModeSuggestions, archivedModeSuggestions,
caseModeSuggestions,
forkModeSuggestions, forkModeSuggestions,
publicModeSuggestions, visibilityModeSuggestions,
} from "./constants"; } from "./constants";
import { IconType } from "react-icons/lib"; import { IconType } from "react-icons/lib";
import { VscFile, VscFilter, VscRepo, VscSymbolMisc } from "react-icons/vsc"; import { VscFile, VscFilter, VscRepo, VscSymbolMisc } from "react-icons/vsc";
@ -32,9 +31,8 @@ export type SuggestionMode =
"archived" | "archived" |
"file" | "file" |
"language" | "language" |
"case" |
"fork" | "fork" |
"public" | "visibility" |
"revision" | "revision" |
"symbol" | "symbol" |
"content" | "content" |
@ -137,9 +135,9 @@ const SearchSuggestionsBox = forwardRef(({
DefaultIcon?: IconType DefaultIcon?: IconType
} => { } => {
switch (suggestionMode) { switch (suggestionMode) {
case "public": case "visibility":
return { return {
list: publicModeSuggestions, list: visibilityModeSuggestions,
onSuggestionClicked: createOnSuggestionClickedHandler(), onSuggestionClicked: createOnSuggestionClickedHandler(),
} }
case "fork": case "fork":
@ -147,11 +145,6 @@ const SearchSuggestionsBox = forwardRef(({
list: forkModeSuggestions, list: forkModeSuggestions,
onSuggestionClicked: createOnSuggestionClickedHandler(), onSuggestionClicked: createOnSuggestionClickedHandler(),
} }
case "case":
return {
list: caseModeSuggestions,
onSuggestionClicked: createOnSuggestionClickedHandler(),
}
case "archived": case "archived":
return { return {
list: archivedModeSuggestions, list: archivedModeSuggestions,
@ -183,7 +176,7 @@ const SearchSuggestionsBox = forwardRef(({
case "file": case "file":
return { return {
list: fileSuggestions, list: fileSuggestions,
onSuggestionClicked: createOnSuggestionClickedHandler(), onSuggestionClicked: createOnSuggestionClickedHandler({ regexEscaped: true }),
isClientSideSearchEnabled: false, isClientSideSearchEnabled: false,
DefaultIcon: VscFile, DefaultIcon: VscFile,
} }

View file

@ -26,7 +26,7 @@ export const useRefineModeSuggestions = () => {
}, },
] : []), ] : []),
{ {
value: SearchPrefix.public, value: SearchPrefix.visibility,
description: "Filter on repository visibility." description: "Filter on repository visibility."
}, },
{ {
@ -86,10 +86,6 @@ export const useRefineModeSuggestions = () => {
value: SearchPrefix.archived, value: SearchPrefix.archived,
description: "Include results from archived repositories.", description: "Include results from archived repositories.",
}, },
{
value: SearchPrefix.case,
description: "Control case-sensitivity of search patterns."
},
{ {
value: SearchPrefix.fork, value: SearchPrefix.fork,
description: "Include only results from forked repositories." description: "Include only results from forked repositories."

View file

@ -70,12 +70,6 @@ export const useSuggestionModeMappings = () => {
SearchPrefix.archived SearchPrefix.archived
] ]
}, },
{
suggestionMode: "case",
prefixes: [
SearchPrefix.case
]
},
{ {
suggestionMode: "fork", suggestionMode: "fork",
prefixes: [ prefixes: [
@ -83,9 +77,9 @@ export const useSuggestionModeMappings = () => {
] ]
}, },
{ {
suggestionMode: "public", suggestionMode: "visibility",
prefixes: [ prefixes: [
SearchPrefix.public SearchPrefix.visibility
] ]
}, },
...(isSearchContextsEnabled ? [ ...(isSearchContextsEnabled ? [

View file

@ -5,7 +5,7 @@ import { Suggestion, SuggestionMode } from "./searchSuggestionsBox";
import { getRepos, search } from "@/app/api/(client)/client"; import { getRepos, search } from "@/app/api/(client)/client";
import { getSearchContexts } from "@/actions"; import { getSearchContexts } from "@/actions";
import { useMemo } from "react"; import { useMemo } from "react";
import { SearchSymbol } from "@/features/search/types"; import { SearchSymbol } from "@/features/search";
import { languageMetadataMap } from "@/lib/languageMetadata"; import { languageMetadataMap } from "@/lib/languageMetadata";
import { import {
VscSymbolClass, VscSymbolClass,
@ -55,7 +55,8 @@ export const useSuggestionsData = ({
query: `file:${suggestionQuery}`, query: `file:${suggestionQuery}`,
matches: 15, matches: 15,
contextLines: 1, contextLines: 1,
}, domain), source: 'search-bar-file-suggestions'
}),
select: (data): Suggestion[] => { select: (data): Suggestion[] => {
if (isServiceError(data)) { if (isServiceError(data)) {
return []; return [];
@ -75,7 +76,8 @@ export const useSuggestionsData = ({
query: `sym:${suggestionQuery.length > 0 ? suggestionQuery : ".*"}`, query: `sym:${suggestionQuery.length > 0 ? suggestionQuery : ".*"}`,
matches: 15, matches: 15,
contextLines: 1, contextLines: 1,
}, domain), source: 'search-bar-symbol-suggestions'
}),
select: (data): Suggestion[] => { select: (data): Suggestion[] => {
if (isServiceError(data)) { if (isServiceError(data)) {
return []; return [];

Some files were not shown because too many files have changed in this diff Show more