mirror of
https://github.com/open-webui/open-webui.git
synced 2025-12-14 21:35:19 +00:00
Merge branch 'dev' into Classic298-patch-1
This commit is contained in:
commit
e67ab489aa
153 changed files with 5180 additions and 1807 deletions
|
|
@ -7,6 +7,15 @@ OPENAI_API_KEY=''
|
|||
|
||||
# AUTOMATIC1111_BASE_URL="http://localhost:7860"
|
||||
|
||||
# For production, you should only need one host as
|
||||
# fastapi serves the svelte-kit built frontend and backend from the same host and port.
|
||||
# To test with CORS locally, you can set something like
|
||||
# CORS_ALLOW_ORIGIN='http://localhost:5173;http://localhost:8080'
|
||||
CORS_ALLOW_ORIGIN='*'
|
||||
|
||||
# For production you should set this to match the proxy configuration (127.0.0.1)
|
||||
FORWARDED_ALLOW_IPS='*'
|
||||
|
||||
# DO NOT TRACK
|
||||
SCARF_NO_ANALYTICS=true
|
||||
DO_NOT_TRACK=true
|
||||
|
|
|
|||
50
.gitattributes
vendored
50
.gitattributes
vendored
|
|
@ -1 +1,49 @@
|
|||
*.sh text eol=lf
|
||||
# TypeScript
|
||||
*.ts text eol=lf
|
||||
*.tsx text eol=lf
|
||||
|
||||
# JavaScript
|
||||
*.js text eol=lf
|
||||
*.jsx text eol=lf
|
||||
*.mjs text eol=lf
|
||||
*.cjs text eol=lf
|
||||
|
||||
# Svelte
|
||||
*.svelte text eol=lf
|
||||
|
||||
# HTML/CSS
|
||||
*.html text eol=lf
|
||||
*.css text eol=lf
|
||||
*.scss text eol=lf
|
||||
*.less text eol=lf
|
||||
|
||||
# Config files and JSON
|
||||
*.json text eol=lf
|
||||
*.jsonc text eol=lf
|
||||
*.yml text eol=lf
|
||||
*.yaml text eol=lf
|
||||
*.toml text eol=lf
|
||||
|
||||
# Shell scripts
|
||||
*.sh text eol=lf
|
||||
|
||||
# Markdown & docs
|
||||
*.md text eol=lf
|
||||
*.mdx text eol=lf
|
||||
*.txt text eol=lf
|
||||
|
||||
# Git-related
|
||||
.gitattributes text eol=lf
|
||||
.gitignore text eol=lf
|
||||
|
||||
# Prettier and other dotfiles
|
||||
.prettierrc text eol=lf
|
||||
.prettierignore text eol=lf
|
||||
.eslintrc text eol=lf
|
||||
.eslintignore text eol=lf
|
||||
.stylelintrc text eol=lf
|
||||
.editorconfig text eol=lf
|
||||
|
||||
# Misc
|
||||
*.env text eol=lf
|
||||
*.lock text eol=lf
|
||||
191
.github/workflows/docker-build.yaml
vendored
191
.github/workflows/docker-build.yaml
vendored
|
|
@ -14,16 +14,18 @@ env:
|
|||
|
||||
jobs:
|
||||
build-main-image:
|
||||
runs-on: ${{ matrix.platform == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm64
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
|
||||
steps:
|
||||
# GitHub Packages requires the entire repository name to be in lowercase
|
||||
|
|
@ -111,16 +113,18 @@ jobs:
|
|||
retention-days: 1
|
||||
|
||||
build-cuda-image:
|
||||
runs-on: ${{ matrix.platform == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm64
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
|
||||
steps:
|
||||
# GitHub Packages requires the entire repository name to be in lowercase
|
||||
|
|
@ -210,17 +214,122 @@ jobs:
|
|||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
build-ollama-image:
|
||||
runs-on: ${{ matrix.platform == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }}
|
||||
build-cuda126-image:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
- linux/arm64
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
|
||||
steps:
|
||||
# GitHub Packages requires the entire repository name to be in lowercase
|
||||
# although the repository owner has a lowercase username, this prevents some people from running actions after forking
|
||||
- name: Set repository and image name to lowercase
|
||||
run: |
|
||||
echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV}
|
||||
echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV}
|
||||
env:
|
||||
IMAGE_NAME: '${{ github.repository }}'
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata for Docker images (cuda126 tag)
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.FULL_IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=tag
|
||||
type=sha,prefix=git-
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,enable=${{ github.ref == 'refs/heads/main' }},prefix=,suffix=,value=cuda126
|
||||
flavor: |
|
||||
latest=${{ github.ref == 'refs/heads/main' }}
|
||||
suffix=-cuda126,onlatest=true
|
||||
|
||||
- name: Extract metadata for Docker cache
|
||||
id: cache-meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.FULL_IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
${{ github.ref_type == 'tag' && 'type=raw,value=main' || '' }}
|
||||
flavor: |
|
||||
prefix=cache-cuda126-${{ matrix.platform }}-
|
||||
latest=false
|
||||
|
||||
- name: Build Docker image (cuda126)
|
||||
uses: docker/build-push-action@v5
|
||||
id: build
|
||||
with:
|
||||
context: .
|
||||
push: true
|
||||
platforms: ${{ matrix.platform }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
outputs: type=image,name=${{ env.FULL_IMAGE_NAME }},push-by-digest=true,name-canonical=true,push=true
|
||||
cache-from: type=registry,ref=${{ steps.cache-meta.outputs.tags }}
|
||||
cache-to: type=registry,ref=${{ steps.cache-meta.outputs.tags }},mode=max
|
||||
build-args: |
|
||||
BUILD_HASH=${{ github.sha }}
|
||||
USE_CUDA=true
|
||||
USE_CUDA_VER=cu126
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-cuda126-${{ env.PLATFORM_PAIR }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
build-ollama-image:
|
||||
runs-on: ${{ matrix.runner }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- platform: linux/amd64
|
||||
runner: ubuntu-latest
|
||||
- platform: linux/arm64
|
||||
runner: ubuntu-24.04-arm
|
||||
|
||||
steps:
|
||||
# GitHub Packages requires the entire repository name to be in lowercase
|
||||
|
|
@ -420,6 +529,62 @@ jobs:
|
|||
run: |
|
||||
docker buildx imagetools inspect ${{ env.FULL_IMAGE_NAME }}:${{ steps.meta.outputs.version }}
|
||||
|
||||
merge-cuda126-images:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build-cuda126-image]
|
||||
steps:
|
||||
# GitHub Packages requires the entire repository name to be in lowercase
|
||||
# although the repository owner has a lowercase username, this prevents some people from running actions after forking
|
||||
- name: Set repository and image name to lowercase
|
||||
run: |
|
||||
echo "IMAGE_NAME=${IMAGE_NAME,,}" >>${GITHUB_ENV}
|
||||
echo "FULL_IMAGE_NAME=ghcr.io/${IMAGE_NAME,,}" >>${GITHUB_ENV}
|
||||
env:
|
||||
IMAGE_NAME: '${{ github.repository }}'
|
||||
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: digests-cuda126-*
|
||||
path: /tmp/digests
|
||||
merge-multiple: true
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Log in to the Container registry
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Extract metadata for Docker images (default latest tag)
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.FULL_IMAGE_NAME }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=tag
|
||||
type=sha,prefix=git-
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=raw,enable=${{ github.ref == 'refs/heads/main' }},prefix=,suffix=,value=cuda126
|
||||
flavor: |
|
||||
latest=${{ github.ref == 'refs/heads/main' }}
|
||||
suffix=-cuda126,onlatest=true
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.FULL_IMAGE_NAME }}@sha256:%s ' *)
|
||||
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.FULL_IMAGE_NAME }}:${{ steps.meta.outputs.version }}
|
||||
|
||||
merge-ollama-images:
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build-ollama-image]
|
||||
|
|
|
|||
|
|
@ -5,5 +5,6 @@
|
|||
"printWidth": 100,
|
||||
"plugins": ["prettier-plugin-svelte"],
|
||||
"pluginSearchDirs": ["."],
|
||||
"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }]
|
||||
"overrides": [{ "files": "*.svelte", "options": { "parser": "svelte" } }],
|
||||
"endOfLine": "lf"
|
||||
}
|
||||
|
|
|
|||
16
README.md
16
README.md
|
|
@ -73,19 +73,29 @@ Want to learn more about Open WebUI's features? Check out our [Open WebUI docume
|
|||
</a>
|
||||
</td>
|
||||
<td>
|
||||
N8N • Does your interface have a backend yet?<br>Try <a href="https://n8n.io/">n8n</a>
|
||||
<a href="https://n8n.io/">n8n</a> • Does your interface have a backend yet?<br>Try <a href="https://n8n.io/">n8n</a>
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<a href="https://warp.dev/open-webui" target="_blank">
|
||||
<img src="https://docs.openwebui.com/sponsors/logos/warp.png" alt="n8n" style="width: 8rem; height: 8rem; border-radius: .75rem;" />
|
||||
<img src="https://docs.openwebui.com/sponsors/logos/warp.png" alt="Warp" style="width: 8rem; height: 8rem; border-radius: .75rem;" />
|
||||
</a>
|
||||
</td>
|
||||
<td>
|
||||
<a href="https://warp.dev/open-webui">Warp</a> • The intelligent terminal for developers
|
||||
</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>
|
||||
<a href="https://tailscale.com/blog/self-host-a-local-ai-stack/?utm_source=OpenWebUI&utm_medium=paid-ad-placement&utm_campaign=OpenWebUI-Docs" target="_blank">
|
||||
<img src="https://docs.openwebui.com/sponsors/logos/tailscale.png" alt="Tailscale" style="width: 8rem; height: 8rem; border-radius: .75rem;" />
|
||||
</a>
|
||||
</td>
|
||||
<td>
|
||||
<a href="https://tailscale.com/blog/self-host-a-local-ai-stack/?utm_source=OpenWebUI&utm_medium=paid-ad-placement&utm_campaign=OpenWebUI-Docs">Tailscale</a> • Connect self-hosted AI to any device with Tailscale
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
---
|
||||
|
|
@ -181,6 +191,8 @@ After installation, you can access Open WebUI at [http://localhost:3000](http://
|
|||
|
||||
We offer various installation alternatives, including non-Docker native installation methods, Docker Compose, Kustomize, and Helm. Visit our [Open WebUI Documentation](https://docs.openwebui.com/getting-started/) or join our [Discord community](https://discord.gg/5rJgQTnV4s) for comprehensive guidance.
|
||||
|
||||
Look at the [Local Development Guide](https://docs.openwebui.com/getting-started/advanced-topics/development) for instructions on setting up a local development environment.
|
||||
|
||||
### Troubleshooting
|
||||
|
||||
Encountering connection issues? Our [Open WebUI Documentation](https://docs.openwebui.com/troubleshooting/) has got you covered. For further assistance and to join our vibrant community, visit the [Open WebUI Discord](https://discord.gg/5rJgQTnV4s).
|
||||
|
|
|
|||
|
|
@ -347,6 +347,24 @@ MICROSOFT_CLIENT_TENANT_ID = PersistentConfig(
|
|||
os.environ.get("MICROSOFT_CLIENT_TENANT_ID", ""),
|
||||
)
|
||||
|
||||
MICROSOFT_CLIENT_LOGIN_BASE_URL = PersistentConfig(
|
||||
"MICROSOFT_CLIENT_LOGIN_BASE_URL",
|
||||
"oauth.microsoft.login_base_url",
|
||||
os.environ.get(
|
||||
"MICROSOFT_CLIENT_LOGIN_BASE_URL", "https://login.microsoftonline.com"
|
||||
),
|
||||
)
|
||||
|
||||
MICROSOFT_CLIENT_PICTURE_URL = PersistentConfig(
|
||||
"MICROSOFT_CLIENT_PICTURE_URL",
|
||||
"oauth.microsoft.picture_url",
|
||||
os.environ.get(
|
||||
"MICROSOFT_CLIENT_PICTURE_URL",
|
||||
"https://graph.microsoft.com/v1.0/me/photo/$value",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
MICROSOFT_OAUTH_SCOPE = PersistentConfig(
|
||||
"MICROSOFT_OAUTH_SCOPE",
|
||||
"oauth.microsoft.scope",
|
||||
|
|
@ -542,7 +560,7 @@ def load_oauth_providers():
|
|||
name="microsoft",
|
||||
client_id=MICROSOFT_CLIENT_ID.value,
|
||||
client_secret=MICROSOFT_CLIENT_SECRET.value,
|
||||
server_metadata_url=f"https://login.microsoftonline.com/{MICROSOFT_CLIENT_TENANT_ID.value}/v2.0/.well-known/openid-configuration?appid={MICROSOFT_CLIENT_ID.value}",
|
||||
server_metadata_url=f"{MICROSOFT_CLIENT_LOGIN_BASE_URL.value}/{MICROSOFT_CLIENT_TENANT_ID.value}/v2.0/.well-known/openid-configuration?appid={MICROSOFT_CLIENT_ID.value}",
|
||||
client_kwargs={
|
||||
"scope": MICROSOFT_OAUTH_SCOPE.value,
|
||||
},
|
||||
|
|
@ -551,7 +569,7 @@ def load_oauth_providers():
|
|||
|
||||
OAUTH_PROVIDERS["microsoft"] = {
|
||||
"redirect_uri": MICROSOFT_REDIRECT_URI.value,
|
||||
"picture_url": "https://graph.microsoft.com/v1.0/me/photo/$value",
|
||||
"picture_url": MICROSOFT_CLIENT_PICTURE_URL.value,
|
||||
"register": microsoft_oauth_register,
|
||||
}
|
||||
|
||||
|
|
@ -901,9 +919,7 @@ TOOL_SERVER_CONNECTIONS = PersistentConfig(
|
|||
####################################
|
||||
|
||||
|
||||
WEBUI_URL = PersistentConfig(
|
||||
"WEBUI_URL", "webui.url", os.environ.get("WEBUI_URL", "http://localhost:3000")
|
||||
)
|
||||
WEBUI_URL = PersistentConfig("WEBUI_URL", "webui.url", os.environ.get("WEBUI_URL", ""))
|
||||
|
||||
|
||||
ENABLE_SIGNUP = PersistentConfig(
|
||||
|
|
@ -1247,12 +1263,6 @@ if THREAD_POOL_SIZE is not None and isinstance(THREAD_POOL_SIZE, str):
|
|||
THREAD_POOL_SIZE = None
|
||||
|
||||
|
||||
def validate_cors_origins(origins):
|
||||
for origin in origins:
|
||||
if origin != "*":
|
||||
validate_cors_origin(origin)
|
||||
|
||||
|
||||
def validate_cors_origin(origin):
|
||||
parsed_url = urlparse(origin)
|
||||
|
||||
|
|
@ -1272,16 +1282,17 @@ def validate_cors_origin(origin):
|
|||
# To test CORS_ALLOW_ORIGIN locally, you can set something like
|
||||
# CORS_ALLOW_ORIGIN=http://localhost:5173;http://localhost:8080
|
||||
# in your .env file depending on your frontend port, 5173 in this case.
|
||||
CORS_ALLOW_ORIGIN = os.environ.get(
|
||||
"CORS_ALLOW_ORIGIN", "*;http://localhost:5173;http://localhost:8080"
|
||||
).split(";")
|
||||
CORS_ALLOW_ORIGIN = os.environ.get("CORS_ALLOW_ORIGIN", "*").split(";")
|
||||
|
||||
if "*" in CORS_ALLOW_ORIGIN:
|
||||
if CORS_ALLOW_ORIGIN == ["*"]:
|
||||
log.warning(
|
||||
"\n\nWARNING: CORS_ALLOW_ORIGIN IS SET TO '*' - NOT RECOMMENDED FOR PRODUCTION DEPLOYMENTS.\n"
|
||||
)
|
||||
|
||||
validate_cors_origins(CORS_ALLOW_ORIGIN)
|
||||
else:
|
||||
# You have to pick between a single wildcard or a list of origins.
|
||||
# Doing both will result in CORS errors in the browser.
|
||||
for origin in CORS_ALLOW_ORIGIN:
|
||||
validate_cors_origin(origin)
|
||||
|
||||
|
||||
class BannerModel(BaseModel):
|
||||
|
|
@ -1413,6 +1424,35 @@ Strictly return in JSON format:
|
|||
{{MESSAGES:END:6}}
|
||||
</chat_history>"""
|
||||
|
||||
|
||||
FOLLOW_UP_GENERATION_PROMPT_TEMPLATE = PersistentConfig(
|
||||
"FOLLOW_UP_GENERATION_PROMPT_TEMPLATE",
|
||||
"task.follow_up.prompt_template",
|
||||
os.environ.get("FOLLOW_UP_GENERATION_PROMPT_TEMPLATE", ""),
|
||||
)
|
||||
|
||||
DEFAULT_FOLLOW_UP_GENERATION_PROMPT_TEMPLATE = """### Task:
|
||||
Suggest 3-5 relevant follow-up questions or prompts that the user might naturally ask next in this conversation as a **user**, based on the chat history, to help continue or deepen the discussion.
|
||||
### Guidelines:
|
||||
- Write all follow-up questions from the user’s point of view, directed to the assistant.
|
||||
- Make questions concise, clear, and directly related to the discussed topic(s).
|
||||
- Only suggest follow-ups that make sense given the chat content and do not repeat what was already covered.
|
||||
- If the conversation is very short or not specific, suggest more general (but relevant) follow-ups the user might ask.
|
||||
- Use the conversation's primary language; default to English if multilingual.
|
||||
- Response must be a JSON array of strings, no extra text or formatting.
|
||||
### Output:
|
||||
JSON format: { "follow_ups": ["Question 1?", "Question 2?", "Question 3?"] }
|
||||
### Chat History:
|
||||
<chat_history>
|
||||
{{MESSAGES:END:6}}
|
||||
</chat_history>"""
|
||||
|
||||
ENABLE_FOLLOW_UP_GENERATION = PersistentConfig(
|
||||
"ENABLE_FOLLOW_UP_GENERATION",
|
||||
"task.follow_up.enable",
|
||||
os.environ.get("ENABLE_FOLLOW_UP_GENERATION", "True").lower() == "true",
|
||||
)
|
||||
|
||||
ENABLE_TAGS_GENERATION = PersistentConfig(
|
||||
"ENABLE_TAGS_GENERATION",
|
||||
"task.tags.enable",
|
||||
|
|
@ -1785,6 +1825,13 @@ PGVECTOR_INITIALIZE_MAX_VECTOR_LENGTH = int(
|
|||
os.environ.get("PGVECTOR_INITIALIZE_MAX_VECTOR_LENGTH", "1536")
|
||||
)
|
||||
|
||||
PGVECTOR_PGCRYPTO = os.getenv("PGVECTOR_PGCRYPTO", "false").lower() == "true"
|
||||
PGVECTOR_PGCRYPTO_KEY = os.getenv("PGVECTOR_PGCRYPTO_KEY", None)
|
||||
if PGVECTOR_PGCRYPTO and not PGVECTOR_PGCRYPTO_KEY:
|
||||
raise ValueError(
|
||||
"PGVECTOR_PGCRYPTO is enabled but PGVECTOR_PGCRYPTO_KEY is not set. Please provide a valid key."
|
||||
)
|
||||
|
||||
# Pinecone
|
||||
PINECONE_API_KEY = os.environ.get("PINECONE_API_KEY", None)
|
||||
PINECONE_ENVIRONMENT = os.environ.get("PINECONE_ENVIRONMENT", None)
|
||||
|
|
@ -1945,6 +1992,40 @@ DOCLING_DO_PICTURE_DESCRIPTION = PersistentConfig(
|
|||
os.getenv("DOCLING_DO_PICTURE_DESCRIPTION", "False").lower() == "true",
|
||||
)
|
||||
|
||||
DOCLING_PICTURE_DESCRIPTION_MODE = PersistentConfig(
|
||||
"DOCLING_PICTURE_DESCRIPTION_MODE",
|
||||
"rag.docling_picture_description_mode",
|
||||
os.getenv("DOCLING_PICTURE_DESCRIPTION_MODE", ""),
|
||||
)
|
||||
|
||||
|
||||
docling_picture_description_local = os.getenv("DOCLING_PICTURE_DESCRIPTION_LOCAL", "")
|
||||
try:
|
||||
docling_picture_description_local = json.loads(docling_picture_description_local)
|
||||
except json.JSONDecodeError:
|
||||
docling_picture_description_local = {}
|
||||
|
||||
|
||||
DOCLING_PICTURE_DESCRIPTION_LOCAL = PersistentConfig(
|
||||
"DOCLING_PICTURE_DESCRIPTION_LOCAL",
|
||||
"rag.docling_picture_description_local",
|
||||
docling_picture_description_local,
|
||||
)
|
||||
|
||||
docling_picture_description_api = os.getenv("DOCLING_PICTURE_DESCRIPTION_API", "")
|
||||
try:
|
||||
docling_picture_description_api = json.loads(docling_picture_description_api)
|
||||
except json.JSONDecodeError:
|
||||
docling_picture_description_api = {}
|
||||
|
||||
|
||||
DOCLING_PICTURE_DESCRIPTION_API = PersistentConfig(
|
||||
"DOCLING_PICTURE_DESCRIPTION_API",
|
||||
"rag.docling_picture_description_api",
|
||||
docling_picture_description_api,
|
||||
)
|
||||
|
||||
|
||||
DOCUMENT_INTELLIGENCE_ENDPOINT = PersistentConfig(
|
||||
"DOCUMENT_INTELLIGENCE_ENDPOINT",
|
||||
"rag.document_intelligence_endpoint",
|
||||
|
|
@ -2444,6 +2525,18 @@ PERPLEXITY_API_KEY = PersistentConfig(
|
|||
os.getenv("PERPLEXITY_API_KEY", ""),
|
||||
)
|
||||
|
||||
PERPLEXITY_MODEL = PersistentConfig(
|
||||
"PERPLEXITY_MODEL",
|
||||
"rag.web.search.perplexity_model",
|
||||
os.getenv("PERPLEXITY_MODEL", "sonar"),
|
||||
)
|
||||
|
||||
PERPLEXITY_SEARCH_CONTEXT_USAGE = PersistentConfig(
|
||||
"PERPLEXITY_SEARCH_CONTEXT_USAGE",
|
||||
"rag.web.search.perplexity_search_context_usage",
|
||||
os.getenv("PERPLEXITY_SEARCH_CONTEXT_USAGE", "medium"),
|
||||
)
|
||||
|
||||
SOUGOU_API_SID = PersistentConfig(
|
||||
"SOUGOU_API_SID",
|
||||
"rag.web.search.sougou_api_sid",
|
||||
|
|
|
|||
|
|
@ -111,6 +111,7 @@ class TASKS(str, Enum):
|
|||
|
||||
DEFAULT = lambda task="": f"{task if task else 'generation'}"
|
||||
TITLE_GENERATION = "title_generation"
|
||||
FOLLOW_UP_GENERATION = "follow_up_generation"
|
||||
TAGS_GENERATION = "tags_generation"
|
||||
EMOJI_GENERATION = "emoji_generation"
|
||||
QUERY_GENERATION = "query_generation"
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import os
|
|||
import pkgutil
|
||||
import sys
|
||||
import shutil
|
||||
from uuid import uuid4
|
||||
from pathlib import Path
|
||||
|
||||
import markdown
|
||||
|
|
@ -130,6 +131,7 @@ else:
|
|||
PACKAGE_DATA = {"version": "0.0.0"}
|
||||
|
||||
VERSION = PACKAGE_DATA["version"]
|
||||
INSTANCE_ID = os.environ.get("INSTANCE_ID", str(uuid4()))
|
||||
|
||||
|
||||
# Function to parse each section
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ from open_webui.socket.main import (
|
|||
)
|
||||
|
||||
|
||||
from open_webui.models.users import UserModel
|
||||
from open_webui.models.functions import Functions
|
||||
from open_webui.models.models import Models
|
||||
|
||||
|
|
@ -227,12 +228,7 @@ async def generate_function_chat_completion(
|
|||
"__task__": __task__,
|
||||
"__task_body__": __task_body__,
|
||||
"__files__": files,
|
||||
"__user__": {
|
||||
"id": user.id,
|
||||
"email": user.email,
|
||||
"name": user.name,
|
||||
"role": user.role,
|
||||
},
|
||||
"__user__": user.model_dump() if isinstance(user, UserModel) else {},
|
||||
"__metadata__": metadata,
|
||||
"__request__": request,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,6 +8,8 @@ import shutil
|
|||
import sys
|
||||
import time
|
||||
import random
|
||||
from uuid import uuid4
|
||||
|
||||
|
||||
from contextlib import asynccontextmanager
|
||||
from urllib.parse import urlencode, parse_qs, urlparse
|
||||
|
|
@ -19,6 +21,7 @@ from aiocache import cached
|
|||
import aiohttp
|
||||
import anyio.to_thread
|
||||
import requests
|
||||
from redis import Redis
|
||||
|
||||
|
||||
from fastapi import (
|
||||
|
|
@ -37,7 +40,7 @@ from fastapi import (
|
|||
from fastapi.openapi.docs import get_swagger_ui_html
|
||||
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import JSONResponse, RedirectResponse
|
||||
from fastapi.responses import FileResponse, JSONResponse, RedirectResponse
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
|
||||
from starlette_compress import CompressMiddleware
|
||||
|
|
@ -231,6 +234,9 @@ from open_webui.config import (
|
|||
DOCLING_OCR_ENGINE,
|
||||
DOCLING_OCR_LANG,
|
||||
DOCLING_DO_PICTURE_DESCRIPTION,
|
||||
DOCLING_PICTURE_DESCRIPTION_MODE,
|
||||
DOCLING_PICTURE_DESCRIPTION_LOCAL,
|
||||
DOCLING_PICTURE_DESCRIPTION_API,
|
||||
DOCUMENT_INTELLIGENCE_ENDPOINT,
|
||||
DOCUMENT_INTELLIGENCE_KEY,
|
||||
MISTRAL_OCR_API_KEY,
|
||||
|
|
@ -268,6 +274,8 @@ from open_webui.config import (
|
|||
BRAVE_SEARCH_API_KEY,
|
||||
EXA_API_KEY,
|
||||
PERPLEXITY_API_KEY,
|
||||
PERPLEXITY_MODEL,
|
||||
PERPLEXITY_SEARCH_CONTEXT_USAGE,
|
||||
SOUGOU_API_SID,
|
||||
SOUGOU_API_SK,
|
||||
KAGI_SEARCH_API_KEY,
|
||||
|
|
@ -359,10 +367,12 @@ from open_webui.config import (
|
|||
TASK_MODEL_EXTERNAL,
|
||||
ENABLE_TAGS_GENERATION,
|
||||
ENABLE_TITLE_GENERATION,
|
||||
ENABLE_FOLLOW_UP_GENERATION,
|
||||
ENABLE_SEARCH_QUERY_GENERATION,
|
||||
ENABLE_RETRIEVAL_QUERY_GENERATION,
|
||||
ENABLE_AUTOCOMPLETE_GENERATION,
|
||||
TITLE_GENERATION_PROMPT_TEMPLATE,
|
||||
FOLLOW_UP_GENERATION_PROMPT_TEMPLATE,
|
||||
TAGS_GENERATION_PROMPT_TEMPLATE,
|
||||
IMAGE_PROMPT_GENERATION_PROMPT_TEMPLATE,
|
||||
TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE,
|
||||
|
|
@ -384,6 +394,7 @@ from open_webui.env import (
|
|||
SAFE_MODE,
|
||||
SRC_LOG_LEVELS,
|
||||
VERSION,
|
||||
INSTANCE_ID,
|
||||
WEBUI_BUILD_HASH,
|
||||
WEBUI_SECRET_KEY,
|
||||
WEBUI_SESSION_COOKIE_SAME_SITE,
|
||||
|
|
@ -411,6 +422,7 @@ from open_webui.utils.chat import (
|
|||
chat_completed as chat_completed_handler,
|
||||
chat_action as chat_action_handler,
|
||||
)
|
||||
from open_webui.utils.embeddings import generate_embeddings
|
||||
from open_webui.utils.middleware import process_chat_payload, process_chat_response
|
||||
from open_webui.utils.access_control import has_access
|
||||
|
||||
|
|
@ -424,8 +436,10 @@ from open_webui.utils.auth import (
|
|||
from open_webui.utils.plugin import install_tool_and_function_dependencies
|
||||
from open_webui.utils.oauth import OAuthManager
|
||||
from open_webui.utils.security_headers import SecurityHeadersMiddleware
|
||||
from open_webui.utils.redis import get_redis_connection
|
||||
|
||||
from open_webui.tasks import (
|
||||
redis_task_command_listener,
|
||||
list_task_ids_by_chat_id,
|
||||
stop_task,
|
||||
list_tasks,
|
||||
|
|
@ -477,7 +491,9 @@ https://github.com/open-webui/open-webui
|
|||
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
app.state.instance_id = INSTANCE_ID
|
||||
start_logger()
|
||||
|
||||
if RESET_CONFIG_ON_START:
|
||||
reset_config()
|
||||
|
||||
|
|
@ -489,6 +505,19 @@ async def lifespan(app: FastAPI):
|
|||
log.info("Installing external dependencies of functions and tools...")
|
||||
install_tool_and_function_dependencies()
|
||||
|
||||
app.state.redis = get_redis_connection(
|
||||
redis_url=REDIS_URL,
|
||||
redis_sentinels=get_sentinels_from_env(
|
||||
REDIS_SENTINEL_HOSTS, REDIS_SENTINEL_PORT
|
||||
),
|
||||
async_mode=True,
|
||||
)
|
||||
|
||||
if app.state.redis is not None:
|
||||
app.state.redis_task_command_listener = asyncio.create_task(
|
||||
redis_task_command_listener(app)
|
||||
)
|
||||
|
||||
if THREAD_POOL_SIZE and THREAD_POOL_SIZE > 0:
|
||||
limiter = anyio.to_thread.current_default_thread_limiter()
|
||||
limiter.total_tokens = THREAD_POOL_SIZE
|
||||
|
|
@ -497,6 +526,9 @@ async def lifespan(app: FastAPI):
|
|||
|
||||
yield
|
||||
|
||||
if hasattr(app.state, "redis_task_command_listener"):
|
||||
app.state.redis_task_command_listener.cancel()
|
||||
|
||||
|
||||
app = FastAPI(
|
||||
title="Open WebUI",
|
||||
|
|
@ -508,10 +540,12 @@ app = FastAPI(
|
|||
|
||||
oauth_manager = OAuthManager(app)
|
||||
|
||||
app.state.instance_id = None
|
||||
app.state.config = AppConfig(
|
||||
redis_url=REDIS_URL,
|
||||
redis_sentinels=get_sentinels_from_env(REDIS_SENTINEL_HOSTS, REDIS_SENTINEL_PORT),
|
||||
)
|
||||
app.state.redis = None
|
||||
|
||||
app.state.WEBUI_NAME = WEBUI_NAME
|
||||
app.state.LICENSE_METADATA = None
|
||||
|
|
@ -696,6 +730,9 @@ app.state.config.DOCLING_SERVER_URL = DOCLING_SERVER_URL
|
|||
app.state.config.DOCLING_OCR_ENGINE = DOCLING_OCR_ENGINE
|
||||
app.state.config.DOCLING_OCR_LANG = DOCLING_OCR_LANG
|
||||
app.state.config.DOCLING_DO_PICTURE_DESCRIPTION = DOCLING_DO_PICTURE_DESCRIPTION
|
||||
app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE = DOCLING_PICTURE_DESCRIPTION_MODE
|
||||
app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL = DOCLING_PICTURE_DESCRIPTION_LOCAL
|
||||
app.state.config.DOCLING_PICTURE_DESCRIPTION_API = DOCLING_PICTURE_DESCRIPTION_API
|
||||
app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT = DOCUMENT_INTELLIGENCE_ENDPOINT
|
||||
app.state.config.DOCUMENT_INTELLIGENCE_KEY = DOCUMENT_INTELLIGENCE_KEY
|
||||
app.state.config.MISTRAL_OCR_API_KEY = MISTRAL_OCR_API_KEY
|
||||
|
|
@ -771,6 +808,8 @@ app.state.config.BING_SEARCH_V7_ENDPOINT = BING_SEARCH_V7_ENDPOINT
|
|||
app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY = BING_SEARCH_V7_SUBSCRIPTION_KEY
|
||||
app.state.config.EXA_API_KEY = EXA_API_KEY
|
||||
app.state.config.PERPLEXITY_API_KEY = PERPLEXITY_API_KEY
|
||||
app.state.config.PERPLEXITY_MODEL = PERPLEXITY_MODEL
|
||||
app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE = PERPLEXITY_SEARCH_CONTEXT_USAGE
|
||||
app.state.config.SOUGOU_API_SID = SOUGOU_API_SID
|
||||
app.state.config.SOUGOU_API_SK = SOUGOU_API_SK
|
||||
app.state.config.EXTERNAL_WEB_SEARCH_URL = EXTERNAL_WEB_SEARCH_URL
|
||||
|
|
@ -959,6 +998,7 @@ app.state.config.ENABLE_RETRIEVAL_QUERY_GENERATION = ENABLE_RETRIEVAL_QUERY_GENE
|
|||
app.state.config.ENABLE_AUTOCOMPLETE_GENERATION = ENABLE_AUTOCOMPLETE_GENERATION
|
||||
app.state.config.ENABLE_TAGS_GENERATION = ENABLE_TAGS_GENERATION
|
||||
app.state.config.ENABLE_TITLE_GENERATION = ENABLE_TITLE_GENERATION
|
||||
app.state.config.ENABLE_FOLLOW_UP_GENERATION = ENABLE_FOLLOW_UP_GENERATION
|
||||
|
||||
|
||||
app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMPLATE
|
||||
|
|
@ -966,6 +1006,9 @@ app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE = TAGS_GENERATION_PROMPT_TEMPLA
|
|||
app.state.config.IMAGE_PROMPT_GENERATION_PROMPT_TEMPLATE = (
|
||||
IMAGE_PROMPT_GENERATION_PROMPT_TEMPLATE
|
||||
)
|
||||
app.state.config.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE = (
|
||||
FOLLOW_UP_GENERATION_PROMPT_TEMPLATE
|
||||
)
|
||||
|
||||
app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = (
|
||||
TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE
|
||||
|
|
@ -1197,6 +1240,37 @@ async def get_base_models(request: Request, user=Depends(get_admin_user)):
|
|||
return {"data": models}
|
||||
|
||||
|
||||
##################################
|
||||
# Embeddings
|
||||
##################################
|
||||
|
||||
|
||||
@app.post("/api/embeddings")
|
||||
async def embeddings(
|
||||
request: Request, form_data: dict, user=Depends(get_verified_user)
|
||||
):
|
||||
"""
|
||||
OpenAI-compatible embeddings endpoint.
|
||||
|
||||
This handler:
|
||||
- Performs user/model checks and dispatches to the correct backend.
|
||||
- Supports OpenAI, Ollama, arena models, pipelines, and any compatible provider.
|
||||
|
||||
Args:
|
||||
request (Request): Request context.
|
||||
form_data (dict): OpenAI-like payload (e.g., {"model": "...", "input": [...]})
|
||||
user (UserModel): Authenticated user.
|
||||
|
||||
Returns:
|
||||
dict: OpenAI-compatible embeddings response.
|
||||
"""
|
||||
# Make sure models are loaded in app state
|
||||
if not request.app.state.MODELS:
|
||||
await get_all_models(request, user=user)
|
||||
# Use generic dispatcher in utils.embeddings
|
||||
return await generate_embeddings(request, form_data, user)
|
||||
|
||||
|
||||
@app.post("/api/chat/completions")
|
||||
async def chat_completion(
|
||||
request: Request,
|
||||
|
|
@ -1338,26 +1412,30 @@ async def chat_action(
|
|||
|
||||
|
||||
@app.post("/api/tasks/stop/{task_id}")
|
||||
async def stop_task_endpoint(task_id: str, user=Depends(get_verified_user)):
|
||||
async def stop_task_endpoint(
|
||||
request: Request, task_id: str, user=Depends(get_verified_user)
|
||||
):
|
||||
try:
|
||||
result = await stop_task(task_id)
|
||||
result = await stop_task(request, task_id)
|
||||
return result
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(e))
|
||||
|
||||
|
||||
@app.get("/api/tasks")
|
||||
async def list_tasks_endpoint(user=Depends(get_verified_user)):
|
||||
return {"tasks": list_tasks()}
|
||||
async def list_tasks_endpoint(request: Request, user=Depends(get_verified_user)):
|
||||
return {"tasks": await list_tasks(request)}
|
||||
|
||||
|
||||
@app.get("/api/tasks/chat/{chat_id}")
|
||||
async def list_tasks_by_chat_id_endpoint(chat_id: str, user=Depends(get_verified_user)):
|
||||
async def list_tasks_by_chat_id_endpoint(
|
||||
request: Request, chat_id: str, user=Depends(get_verified_user)
|
||||
):
|
||||
chat = Chats.get_chat_by_id(chat_id)
|
||||
if chat is None or chat.user_id != user.id:
|
||||
return {"task_ids": []}
|
||||
|
||||
task_ids = list_task_ids_by_chat_id(chat_id)
|
||||
task_ids = await list_task_ids_by_chat_id(request, chat_id)
|
||||
|
||||
print(f"Task IDs for chat {chat_id}: {task_ids}")
|
||||
return {"task_ids": task_ids}
|
||||
|
|
@ -1628,7 +1706,20 @@ async def healthcheck_with_db():
|
|||
|
||||
|
||||
app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static")
|
||||
app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache")
|
||||
|
||||
|
||||
@app.get("/cache/{path:path}")
|
||||
async def serve_cache_file(
|
||||
path: str,
|
||||
user=Depends(get_verified_user),
|
||||
):
|
||||
file_path = os.path.abspath(os.path.join(CACHE_DIR, path))
|
||||
# prevent path traversal
|
||||
if not file_path.startswith(os.path.abspath(CACHE_DIR)):
|
||||
raise HTTPException(status_code=404, detail="File not found")
|
||||
if not os.path.isfile(file_path):
|
||||
raise HTTPException(status_code=404, detail="File not found")
|
||||
return FileResponse(file_path)
|
||||
|
||||
|
||||
def swagger_ui_html(*args, **kwargs):
|
||||
|
|
|
|||
|
|
@ -95,6 +95,7 @@ class UserRoleUpdateForm(BaseModel):
|
|||
|
||||
|
||||
class UserUpdateForm(BaseModel):
|
||||
role: str
|
||||
name: str
|
||||
email: str
|
||||
profile_image_url: str
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import requests
|
|||
import logging
|
||||
import ftfy
|
||||
import sys
|
||||
import json
|
||||
|
||||
from langchain_community.document_loaders import (
|
||||
AzureAIDocumentIntelligenceLoader,
|
||||
|
|
@ -76,7 +77,6 @@ known_source_ext = [
|
|||
"swift",
|
||||
"vue",
|
||||
"svelte",
|
||||
"msg",
|
||||
"ex",
|
||||
"exs",
|
||||
"erl",
|
||||
|
|
@ -147,17 +147,32 @@ class DoclingLoader:
|
|||
)
|
||||
}
|
||||
|
||||
params = {
|
||||
"image_export_mode": "placeholder",
|
||||
"table_mode": "accurate",
|
||||
}
|
||||
params = {"image_export_mode": "placeholder", "table_mode": "accurate"}
|
||||
|
||||
if self.params:
|
||||
if self.params.get("do_picture_classification"):
|
||||
params["do_picture_classification"] = self.params.get(
|
||||
"do_picture_classification"
|
||||
if self.params.get("do_picture_description"):
|
||||
params["do_picture_description"] = self.params.get(
|
||||
"do_picture_description"
|
||||
)
|
||||
|
||||
picture_description_mode = self.params.get(
|
||||
"picture_description_mode", ""
|
||||
).lower()
|
||||
|
||||
if picture_description_mode == "local" and self.params.get(
|
||||
"picture_description_local", {}
|
||||
):
|
||||
params["picture_description_local"] = self.params.get(
|
||||
"picture_description_local", {}
|
||||
)
|
||||
|
||||
elif picture_description_mode == "api" and self.params.get(
|
||||
"picture_description_api", {}
|
||||
):
|
||||
params["picture_description_api"] = self.params.get(
|
||||
"picture_description_api", {}
|
||||
)
|
||||
|
||||
if self.params.get("ocr_engine") and self.params.get("ocr_lang"):
|
||||
params["ocr_engine"] = self.params.get("ocr_engine")
|
||||
params["ocr_lang"] = [
|
||||
|
|
@ -285,17 +300,20 @@ class Loader:
|
|||
if self._is_text_file(file_ext, file_content_type):
|
||||
loader = TextLoader(file_path, autodetect_encoding=True)
|
||||
else:
|
||||
# Build params for DoclingLoader
|
||||
params = self.kwargs.get("DOCLING_PARAMS", {})
|
||||
if not isinstance(params, dict):
|
||||
try:
|
||||
params = json.loads(params)
|
||||
except json.JSONDecodeError:
|
||||
log.error("Invalid DOCLING_PARAMS format, expected JSON object")
|
||||
params = {}
|
||||
|
||||
loader = DoclingLoader(
|
||||
url=self.kwargs.get("DOCLING_SERVER_URL"),
|
||||
file_path=file_path,
|
||||
mime_type=file_content_type,
|
||||
params={
|
||||
"ocr_engine": self.kwargs.get("DOCLING_OCR_ENGINE"),
|
||||
"ocr_lang": self.kwargs.get("DOCLING_OCR_LANG"),
|
||||
"do_picture_classification": self.kwargs.get(
|
||||
"DOCLING_DO_PICTURE_DESCRIPTION"
|
||||
),
|
||||
},
|
||||
params=params,
|
||||
)
|
||||
elif (
|
||||
self.engine == "document_intelligence"
|
||||
|
|
|
|||
|
|
@ -20,6 +20,14 @@ class MistralLoader:
|
|||
"""
|
||||
Enhanced Mistral OCR loader with both sync and async support.
|
||||
Loads documents by processing them through the Mistral OCR API.
|
||||
|
||||
Performance Optimizations:
|
||||
- Differentiated timeouts for different operations
|
||||
- Intelligent retry logic with exponential backoff
|
||||
- Memory-efficient file streaming for large files
|
||||
- Connection pooling and keepalive optimization
|
||||
- Semaphore-based concurrency control for batch processing
|
||||
- Enhanced error handling with retryable error classification
|
||||
"""
|
||||
|
||||
BASE_API_URL = "https://api.mistral.ai/v1"
|
||||
|
|
@ -53,17 +61,40 @@ class MistralLoader:
|
|||
self.max_retries = max_retries
|
||||
self.debug = enable_debug_logging
|
||||
|
||||
# Pre-compute file info for performance
|
||||
# PERFORMANCE OPTIMIZATION: Differentiated timeouts for different operations
|
||||
# This prevents long-running OCR operations from affecting quick operations
|
||||
# and improves user experience by failing fast on operations that should be quick
|
||||
self.upload_timeout = min(
|
||||
timeout, 120
|
||||
) # Cap upload at 2 minutes - prevents hanging on large files
|
||||
self.url_timeout = (
|
||||
30 # URL requests should be fast - fail quickly if API is slow
|
||||
)
|
||||
self.ocr_timeout = (
|
||||
timeout # OCR can take the full timeout - this is the heavy operation
|
||||
)
|
||||
self.cleanup_timeout = (
|
||||
30 # Cleanup should be quick - don't hang on file deletion
|
||||
)
|
||||
|
||||
# PERFORMANCE OPTIMIZATION: Pre-compute file info to avoid repeated filesystem calls
|
||||
# This avoids multiple os.path.basename() and os.path.getsize() calls during processing
|
||||
self.file_name = os.path.basename(file_path)
|
||||
self.file_size = os.path.getsize(file_path)
|
||||
|
||||
# ENHANCEMENT: Added User-Agent for better API tracking and debugging
|
||||
self.headers = {
|
||||
"Authorization": f"Bearer {self.api_key}",
|
||||
"User-Agent": "OpenWebUI-MistralLoader/2.0",
|
||||
"User-Agent": "OpenWebUI-MistralLoader/2.0", # Helps API provider track usage
|
||||
}
|
||||
|
||||
def _debug_log(self, message: str, *args) -> None:
|
||||
"""Conditional debug logging for performance."""
|
||||
"""
|
||||
PERFORMANCE OPTIMIZATION: Conditional debug logging for performance.
|
||||
|
||||
Only processes debug messages when debug mode is enabled, avoiding
|
||||
string formatting overhead in production environments.
|
||||
"""
|
||||
if self.debug:
|
||||
log.debug(message, *args)
|
||||
|
||||
|
|
@ -115,53 +146,118 @@ class MistralLoader:
|
|||
log.error(f"Unexpected error processing response: {e}")
|
||||
raise
|
||||
|
||||
def _is_retryable_error(self, error: Exception) -> bool:
|
||||
"""
|
||||
ENHANCEMENT: Intelligent error classification for retry logic.
|
||||
|
||||
Determines if an error is retryable based on its type and status code.
|
||||
This prevents wasting time retrying errors that will never succeed
|
||||
(like authentication errors) while ensuring transient errors are retried.
|
||||
|
||||
Retryable errors:
|
||||
- Network connection errors (temporary network issues)
|
||||
- Timeouts (server might be temporarily overloaded)
|
||||
- Server errors (5xx status codes - server-side issues)
|
||||
- Rate limiting (429 status - temporary throttling)
|
||||
|
||||
Non-retryable errors:
|
||||
- Authentication errors (401, 403 - won't fix with retry)
|
||||
- Bad request errors (400 - malformed request)
|
||||
- Not found errors (404 - resource doesn't exist)
|
||||
"""
|
||||
if isinstance(error, requests.exceptions.ConnectionError):
|
||||
return True # Network issues are usually temporary
|
||||
if isinstance(error, requests.exceptions.Timeout):
|
||||
return True # Timeouts might resolve on retry
|
||||
if isinstance(error, requests.exceptions.HTTPError):
|
||||
# Only retry on server errors (5xx) or rate limits (429)
|
||||
if hasattr(error, "response") and error.response is not None:
|
||||
status_code = error.response.status_code
|
||||
return status_code >= 500 or status_code == 429
|
||||
return False
|
||||
if isinstance(
|
||||
error, (aiohttp.ClientConnectionError, aiohttp.ServerTimeoutError)
|
||||
):
|
||||
return True # Async network/timeout errors are retryable
|
||||
if isinstance(error, aiohttp.ClientResponseError):
|
||||
return error.status >= 500 or error.status == 429
|
||||
return False # All other errors are non-retryable
|
||||
|
||||
def _retry_request_sync(self, request_func, *args, **kwargs):
|
||||
"""Synchronous retry logic with exponential backoff."""
|
||||
"""
|
||||
ENHANCEMENT: Synchronous retry logic with intelligent error classification.
|
||||
|
||||
Uses exponential backoff with jitter to avoid thundering herd problems.
|
||||
The wait time increases exponentially but is capped at 30 seconds to
|
||||
prevent excessive delays. Only retries errors that are likely to succeed
|
||||
on subsequent attempts.
|
||||
"""
|
||||
for attempt in range(self.max_retries):
|
||||
try:
|
||||
return request_func(*args, **kwargs)
|
||||
except (requests.exceptions.RequestException, Exception) as e:
|
||||
if attempt == self.max_retries - 1:
|
||||
except Exception as e:
|
||||
if attempt == self.max_retries - 1 or not self._is_retryable_error(e):
|
||||
raise
|
||||
|
||||
wait_time = (2**attempt) + 0.5
|
||||
# PERFORMANCE OPTIMIZATION: Exponential backoff with cap
|
||||
# Prevents overwhelming the server while ensuring reasonable retry delays
|
||||
wait_time = min((2**attempt) + 0.5, 30) # Cap at 30 seconds
|
||||
log.warning(
|
||||
f"Request failed (attempt {attempt + 1}/{self.max_retries}): {e}. Retrying in {wait_time}s..."
|
||||
f"Retryable error (attempt {attempt + 1}/{self.max_retries}): {e}. "
|
||||
f"Retrying in {wait_time}s..."
|
||||
)
|
||||
time.sleep(wait_time)
|
||||
|
||||
async def _retry_request_async(self, request_func, *args, **kwargs):
|
||||
"""Async retry logic with exponential backoff."""
|
||||
"""
|
||||
ENHANCEMENT: Async retry logic with intelligent error classification.
|
||||
|
||||
Async version of retry logic that doesn't block the event loop during
|
||||
wait periods. Uses the same exponential backoff strategy as sync version.
|
||||
"""
|
||||
for attempt in range(self.max_retries):
|
||||
try:
|
||||
return await request_func(*args, **kwargs)
|
||||
except (aiohttp.ClientError, asyncio.TimeoutError) as e:
|
||||
if attempt == self.max_retries - 1:
|
||||
except Exception as e:
|
||||
if attempt == self.max_retries - 1 or not self._is_retryable_error(e):
|
||||
raise
|
||||
|
||||
wait_time = (2**attempt) + 0.5
|
||||
# PERFORMANCE OPTIMIZATION: Non-blocking exponential backoff
|
||||
wait_time = min((2**attempt) + 0.5, 30) # Cap at 30 seconds
|
||||
log.warning(
|
||||
f"Request failed (attempt {attempt + 1}/{self.max_retries}): {e}. Retrying in {wait_time}s..."
|
||||
f"Retryable error (attempt {attempt + 1}/{self.max_retries}): {e}. "
|
||||
f"Retrying in {wait_time}s..."
|
||||
)
|
||||
await asyncio.sleep(wait_time)
|
||||
await asyncio.sleep(wait_time) # Non-blocking wait
|
||||
|
||||
def _upload_file(self) -> str:
|
||||
"""Uploads the file to Mistral for OCR processing (sync version)."""
|
||||
"""
|
||||
PERFORMANCE OPTIMIZATION: Enhanced file upload with streaming consideration.
|
||||
|
||||
Uploads the file to Mistral for OCR processing (sync version).
|
||||
Uses context manager for file handling to ensure proper resource cleanup.
|
||||
Although streaming is not enabled for this endpoint, the file is opened
|
||||
in a context manager to minimize memory usage duration.
|
||||
"""
|
||||
log.info("Uploading file to Mistral API")
|
||||
url = f"{self.BASE_API_URL}/files"
|
||||
file_name = os.path.basename(self.file_path)
|
||||
|
||||
def upload_request():
|
||||
# MEMORY OPTIMIZATION: Use context manager to minimize file handle lifetime
|
||||
# This ensures the file is closed immediately after reading, reducing memory usage
|
||||
with open(self.file_path, "rb") as f:
|
||||
files = {"file": (file_name, f, "application/pdf")}
|
||||
files = {"file": (self.file_name, f, "application/pdf")}
|
||||
data = {"purpose": "ocr"}
|
||||
|
||||
# NOTE: stream=False is required for this endpoint
|
||||
# The Mistral API doesn't support chunked uploads for this endpoint
|
||||
response = requests.post(
|
||||
url,
|
||||
headers=self.headers,
|
||||
files=files,
|
||||
data=data,
|
||||
timeout=self.timeout,
|
||||
timeout=self.upload_timeout, # Use specialized upload timeout
|
||||
stream=False, # Keep as False for this endpoint
|
||||
)
|
||||
|
||||
return self._handle_response(response)
|
||||
|
|
@ -209,7 +305,7 @@ class MistralLoader:
|
|||
url,
|
||||
data=writer,
|
||||
headers=self.headers,
|
||||
timeout=aiohttp.ClientTimeout(total=self.timeout),
|
||||
timeout=aiohttp.ClientTimeout(total=self.upload_timeout),
|
||||
) as response:
|
||||
return await self._handle_response_async(response)
|
||||
|
||||
|
|
@ -231,7 +327,7 @@ class MistralLoader:
|
|||
|
||||
def url_request():
|
||||
response = requests.get(
|
||||
url, headers=signed_url_headers, params=params, timeout=self.timeout
|
||||
url, headers=signed_url_headers, params=params, timeout=self.url_timeout
|
||||
)
|
||||
return self._handle_response(response)
|
||||
|
||||
|
|
@ -261,7 +357,7 @@ class MistralLoader:
|
|||
url,
|
||||
headers=headers,
|
||||
params=params,
|
||||
timeout=aiohttp.ClientTimeout(total=self.timeout),
|
||||
timeout=aiohttp.ClientTimeout(total=self.url_timeout),
|
||||
) as response:
|
||||
return await self._handle_response_async(response)
|
||||
|
||||
|
|
@ -294,7 +390,7 @@ class MistralLoader:
|
|||
|
||||
def ocr_request():
|
||||
response = requests.post(
|
||||
url, headers=ocr_headers, json=payload, timeout=self.timeout
|
||||
url, headers=ocr_headers, json=payload, timeout=self.ocr_timeout
|
||||
)
|
||||
return self._handle_response(response)
|
||||
|
||||
|
|
@ -336,7 +432,7 @@ class MistralLoader:
|
|||
url,
|
||||
json=payload,
|
||||
headers=headers,
|
||||
timeout=aiohttp.ClientTimeout(total=self.timeout),
|
||||
timeout=aiohttp.ClientTimeout(total=self.ocr_timeout),
|
||||
) as response:
|
||||
ocr_response = await self._handle_response_async(response)
|
||||
|
||||
|
|
@ -353,7 +449,9 @@ class MistralLoader:
|
|||
url = f"{self.BASE_API_URL}/files/{file_id}"
|
||||
|
||||
try:
|
||||
response = requests.delete(url, headers=self.headers, timeout=30)
|
||||
response = requests.delete(
|
||||
url, headers=self.headers, timeout=self.cleanup_timeout
|
||||
)
|
||||
delete_response = self._handle_response(response)
|
||||
log.info(f"File deleted successfully: {delete_response}")
|
||||
except Exception as e:
|
||||
|
|
@ -372,7 +470,7 @@ class MistralLoader:
|
|||
url=f"{self.BASE_API_URL}/files/{file_id}",
|
||||
headers=self.headers,
|
||||
timeout=aiohttp.ClientTimeout(
|
||||
total=30
|
||||
total=self.cleanup_timeout
|
||||
), # Shorter timeout for cleanup
|
||||
) as response:
|
||||
return await self._handle_response_async(response)
|
||||
|
|
@ -388,29 +486,39 @@ class MistralLoader:
|
|||
async def _get_session(self):
|
||||
"""Context manager for HTTP session with optimized settings."""
|
||||
connector = aiohttp.TCPConnector(
|
||||
limit=10, # Total connection limit
|
||||
limit_per_host=5, # Per-host connection limit
|
||||
ttl_dns_cache=300, # DNS cache TTL
|
||||
limit=20, # Increased total connection limit for better throughput
|
||||
limit_per_host=10, # Increased per-host limit for API endpoints
|
||||
ttl_dns_cache=600, # Longer DNS cache TTL (10 minutes)
|
||||
use_dns_cache=True,
|
||||
keepalive_timeout=30,
|
||||
keepalive_timeout=60, # Increased keepalive for connection reuse
|
||||
enable_cleanup_closed=True,
|
||||
force_close=False, # Allow connection reuse
|
||||
resolver=aiohttp.AsyncResolver(), # Use async DNS resolver
|
||||
)
|
||||
|
||||
timeout = aiohttp.ClientTimeout(
|
||||
total=self.timeout,
|
||||
connect=30, # Connection timeout
|
||||
sock_read=60, # Socket read timeout
|
||||
)
|
||||
|
||||
async with aiohttp.ClientSession(
|
||||
connector=connector,
|
||||
timeout=aiohttp.ClientTimeout(total=self.timeout),
|
||||
timeout=timeout,
|
||||
headers={"User-Agent": "OpenWebUI-MistralLoader/2.0"},
|
||||
raise_for_status=False, # We handle status codes manually
|
||||
) as session:
|
||||
yield session
|
||||
|
||||
def _process_results(self, ocr_response: Dict[str, Any]) -> List[Document]:
|
||||
"""Process OCR results into Document objects with enhanced metadata."""
|
||||
"""Process OCR results into Document objects with enhanced metadata and memory efficiency."""
|
||||
pages_data = ocr_response.get("pages")
|
||||
if not pages_data:
|
||||
log.warning("No pages found in OCR response.")
|
||||
return [
|
||||
Document(
|
||||
page_content="No text content found", metadata={"error": "no_pages"}
|
||||
page_content="No text content found",
|
||||
metadata={"error": "no_pages", "file_name": self.file_name},
|
||||
)
|
||||
]
|
||||
|
||||
|
|
@ -418,41 +526,44 @@ class MistralLoader:
|
|||
total_pages = len(pages_data)
|
||||
skipped_pages = 0
|
||||
|
||||
# Process pages in a memory-efficient way
|
||||
for page_data in pages_data:
|
||||
page_content = page_data.get("markdown")
|
||||
page_index = page_data.get("index") # API uses 0-based index
|
||||
|
||||
if page_content is not None and page_index is not None:
|
||||
# Clean up content efficiently
|
||||
cleaned_content = (
|
||||
page_content.strip()
|
||||
if isinstance(page_content, str)
|
||||
else str(page_content)
|
||||
)
|
||||
|
||||
if cleaned_content: # Only add non-empty pages
|
||||
documents.append(
|
||||
Document(
|
||||
page_content=cleaned_content,
|
||||
metadata={
|
||||
"page": page_index, # 0-based index from API
|
||||
"page_label": page_index
|
||||
+ 1, # 1-based label for convenience
|
||||
"total_pages": total_pages,
|
||||
"file_name": self.file_name,
|
||||
"file_size": self.file_size,
|
||||
"processing_engine": "mistral-ocr",
|
||||
},
|
||||
)
|
||||
)
|
||||
else:
|
||||
skipped_pages += 1
|
||||
self._debug_log(f"Skipping empty page {page_index}")
|
||||
else:
|
||||
if page_content is None or page_index is None:
|
||||
skipped_pages += 1
|
||||
self._debug_log(
|
||||
f"Skipping page due to missing 'markdown' or 'index'. Data: {page_data}"
|
||||
f"Skipping page due to missing 'markdown' or 'index'. Data keys: {list(page_data.keys())}"
|
||||
)
|
||||
continue
|
||||
|
||||
# Clean up content efficiently with early exit for empty content
|
||||
if isinstance(page_content, str):
|
||||
cleaned_content = page_content.strip()
|
||||
else:
|
||||
cleaned_content = str(page_content).strip()
|
||||
|
||||
if not cleaned_content:
|
||||
skipped_pages += 1
|
||||
self._debug_log(f"Skipping empty page {page_index}")
|
||||
continue
|
||||
|
||||
# Create document with optimized metadata
|
||||
documents.append(
|
||||
Document(
|
||||
page_content=cleaned_content,
|
||||
metadata={
|
||||
"page": page_index, # 0-based index from API
|
||||
"page_label": page_index + 1, # 1-based label for convenience
|
||||
"total_pages": total_pages,
|
||||
"file_name": self.file_name,
|
||||
"file_size": self.file_size,
|
||||
"processing_engine": "mistral-ocr",
|
||||
"content_length": len(cleaned_content),
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
if skipped_pages > 0:
|
||||
log.info(
|
||||
|
|
@ -467,7 +578,11 @@ class MistralLoader:
|
|||
return [
|
||||
Document(
|
||||
page_content="No valid text content found in document",
|
||||
metadata={"error": "no_valid_pages", "total_pages": total_pages},
|
||||
metadata={
|
||||
"error": "no_valid_pages",
|
||||
"total_pages": total_pages,
|
||||
"file_name": self.file_name,
|
||||
},
|
||||
)
|
||||
]
|
||||
|
||||
|
|
@ -585,12 +700,14 @@ class MistralLoader:
|
|||
@staticmethod
|
||||
async def load_multiple_async(
|
||||
loaders: List["MistralLoader"],
|
||||
max_concurrent: int = 5, # Limit concurrent requests
|
||||
) -> List[List[Document]]:
|
||||
"""
|
||||
Process multiple files concurrently for maximum performance.
|
||||
Process multiple files concurrently with controlled concurrency.
|
||||
|
||||
Args:
|
||||
loaders: List of MistralLoader instances
|
||||
max_concurrent: Maximum number of concurrent requests
|
||||
|
||||
Returns:
|
||||
List of document lists, one for each loader
|
||||
|
|
@ -598,11 +715,20 @@ class MistralLoader:
|
|||
if not loaders:
|
||||
return []
|
||||
|
||||
log.info(f"Starting concurrent processing of {len(loaders)} files")
|
||||
log.info(
|
||||
f"Starting concurrent processing of {len(loaders)} files with max {max_concurrent} concurrent"
|
||||
)
|
||||
start_time = time.time()
|
||||
|
||||
# Process all files concurrently
|
||||
tasks = [loader.load_async() for loader in loaders]
|
||||
# Use semaphore to control concurrency
|
||||
semaphore = asyncio.Semaphore(max_concurrent)
|
||||
|
||||
async def process_with_semaphore(loader: "MistralLoader") -> List[Document]:
|
||||
async with semaphore:
|
||||
return await loader.load_async()
|
||||
|
||||
# Process all files with controlled concurrency
|
||||
tasks = [process_with_semaphore(loader) for loader in loaders]
|
||||
results = await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
# Handle any exceptions in results
|
||||
|
|
@ -624,10 +750,18 @@ class MistralLoader:
|
|||
else:
|
||||
processed_results.append(result)
|
||||
|
||||
# MONITORING: Log comprehensive batch processing statistics
|
||||
total_time = time.time() - start_time
|
||||
total_docs = sum(len(docs) for docs in processed_results)
|
||||
success_count = sum(
|
||||
1 for result in results if not isinstance(result, Exception)
|
||||
)
|
||||
failure_count = len(results) - success_count
|
||||
|
||||
log.info(
|
||||
f"Batch processing completed in {total_time:.2f}s, produced {total_docs} total documents"
|
||||
f"Batch processing completed in {total_time:.2f}s: "
|
||||
f"{success_count} files succeeded, {failure_count} files failed, "
|
||||
f"produced {total_docs} total documents"
|
||||
)
|
||||
|
||||
return processed_results
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import logging
|
||||
from xml.etree.ElementTree import ParseError
|
||||
|
||||
from typing import Any, Dict, Generator, List, Optional, Sequence, Union
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
|
@ -93,7 +94,6 @@ class YoutubeLoader:
|
|||
"http": self.proxy_url,
|
||||
"https": self.proxy_url,
|
||||
}
|
||||
# Don't log complete URL because it might contain secrets
|
||||
log.debug(f"Using proxy URL: {self.proxy_url[:14]}...")
|
||||
else:
|
||||
youtube_proxies = None
|
||||
|
|
@ -110,11 +110,37 @@ class YoutubeLoader:
|
|||
for lang in self.language:
|
||||
try:
|
||||
transcript = transcript_list.find_transcript([lang])
|
||||
if transcript.is_generated:
|
||||
log.debug(f"Found generated transcript for language '{lang}'")
|
||||
try:
|
||||
transcript = transcript_list.find_manually_created_transcript(
|
||||
[lang]
|
||||
)
|
||||
log.debug(f"Found manual transcript for language '{lang}'")
|
||||
except NoTranscriptFound:
|
||||
log.debug(
|
||||
f"No manual transcript found for language '{lang}', using generated"
|
||||
)
|
||||
pass
|
||||
|
||||
log.debug(f"Found transcript for language '{lang}'")
|
||||
transcript_pieces: List[Dict[str, Any]] = transcript.fetch()
|
||||
try:
|
||||
transcript_pieces: List[Dict[str, Any]] = transcript.fetch()
|
||||
except ParseError:
|
||||
log.debug(f"Empty or invalid transcript for language '{lang}'")
|
||||
continue
|
||||
|
||||
if not transcript_pieces:
|
||||
log.debug(f"Empty transcript for language '{lang}'")
|
||||
continue
|
||||
|
||||
transcript_text = " ".join(
|
||||
map(
|
||||
lambda transcript_piece: transcript_piece.text.strip(" "),
|
||||
lambda transcript_piece: (
|
||||
transcript_piece.text.strip(" ")
|
||||
if hasattr(transcript_piece, "text")
|
||||
else ""
|
||||
),
|
||||
transcript_pieces,
|
||||
)
|
||||
)
|
||||
|
|
@ -131,6 +157,4 @@ class YoutubeLoader:
|
|||
log.warning(
|
||||
f"No transcript found for any of the specified languages: {languages_tried}. Verify if the video has transcripts, add more languages if needed."
|
||||
)
|
||||
raise NoTranscriptFound(
|
||||
f"No transcript found for any supported language. Verify if the video has transcripts, add more languages if needed."
|
||||
)
|
||||
raise NoTranscriptFound(self.video_id, self.language, list(transcript_list))
|
||||
|
|
|
|||
|
|
@ -1,12 +1,16 @@
|
|||
from typing import Optional, List, Dict, Any
|
||||
import logging
|
||||
import json
|
||||
from sqlalchemy import (
|
||||
func,
|
||||
literal,
|
||||
cast,
|
||||
column,
|
||||
create_engine,
|
||||
Column,
|
||||
Integer,
|
||||
MetaData,
|
||||
LargeBinary,
|
||||
select,
|
||||
text,
|
||||
Text,
|
||||
|
|
@ -28,7 +32,12 @@ from open_webui.retrieval.vector.main import (
|
|||
SearchResult,
|
||||
GetResult,
|
||||
)
|
||||
from open_webui.config import PGVECTOR_DB_URL, PGVECTOR_INITIALIZE_MAX_VECTOR_LENGTH
|
||||
from open_webui.config import (
|
||||
PGVECTOR_DB_URL,
|
||||
PGVECTOR_INITIALIZE_MAX_VECTOR_LENGTH,
|
||||
PGVECTOR_PGCRYPTO,
|
||||
PGVECTOR_PGCRYPTO_KEY,
|
||||
)
|
||||
|
||||
from open_webui.env import SRC_LOG_LEVELS
|
||||
|
||||
|
|
@ -39,14 +48,27 @@ log = logging.getLogger(__name__)
|
|||
log.setLevel(SRC_LOG_LEVELS["RAG"])
|
||||
|
||||
|
||||
def pgcrypto_encrypt(val, key):
|
||||
return func.pgp_sym_encrypt(val, literal(key))
|
||||
|
||||
|
||||
def pgcrypto_decrypt(col, key, outtype="text"):
|
||||
return func.cast(func.pgp_sym_decrypt(col, literal(key)), outtype)
|
||||
|
||||
|
||||
class DocumentChunk(Base):
|
||||
__tablename__ = "document_chunk"
|
||||
|
||||
id = Column(Text, primary_key=True)
|
||||
vector = Column(Vector(dim=VECTOR_LENGTH), nullable=True)
|
||||
collection_name = Column(Text, nullable=False)
|
||||
text = Column(Text, nullable=True)
|
||||
vmetadata = Column(MutableDict.as_mutable(JSONB), nullable=True)
|
||||
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
text = Column(LargeBinary, nullable=True)
|
||||
vmetadata = Column(LargeBinary, nullable=True)
|
||||
else:
|
||||
text = Column(Text, nullable=True)
|
||||
vmetadata = Column(MutableDict.as_mutable(JSONB), nullable=True)
|
||||
|
||||
|
||||
class PgvectorClient(VectorDBBase):
|
||||
|
|
@ -70,6 +92,15 @@ class PgvectorClient(VectorDBBase):
|
|||
# Ensure the pgvector extension is available
|
||||
self.session.execute(text("CREATE EXTENSION IF NOT EXISTS vector;"))
|
||||
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
# Ensure the pgcrypto extension is available for encryption
|
||||
self.session.execute(text("CREATE EXTENSION IF NOT EXISTS pgcrypto;"))
|
||||
|
||||
if not PGVECTOR_PGCRYPTO_KEY:
|
||||
raise ValueError(
|
||||
"PGVECTOR_PGCRYPTO_KEY must be set when PGVECTOR_PGCRYPTO is enabled."
|
||||
)
|
||||
|
||||
# Check vector length consistency
|
||||
self.check_vector_length()
|
||||
|
||||
|
|
@ -147,44 +178,39 @@ class PgvectorClient(VectorDBBase):
|
|||
|
||||
def insert(self, collection_name: str, items: List[VectorItem]) -> None:
|
||||
try:
|
||||
new_items = []
|
||||
for item in items:
|
||||
vector = self.adjust_vector_length(item["vector"])
|
||||
new_chunk = DocumentChunk(
|
||||
id=item["id"],
|
||||
vector=vector,
|
||||
collection_name=collection_name,
|
||||
text=item["text"],
|
||||
vmetadata=item["metadata"],
|
||||
)
|
||||
new_items.append(new_chunk)
|
||||
self.session.bulk_save_objects(new_items)
|
||||
self.session.commit()
|
||||
log.info(
|
||||
f"Inserted {len(new_items)} items into collection '{collection_name}'."
|
||||
)
|
||||
except Exception as e:
|
||||
self.session.rollback()
|
||||
log.exception(f"Error during insert: {e}")
|
||||
raise
|
||||
|
||||
def upsert(self, collection_name: str, items: List[VectorItem]) -> None:
|
||||
try:
|
||||
for item in items:
|
||||
vector = self.adjust_vector_length(item["vector"])
|
||||
existing = (
|
||||
self.session.query(DocumentChunk)
|
||||
.filter(DocumentChunk.id == item["id"])
|
||||
.first()
|
||||
)
|
||||
if existing:
|
||||
existing.vector = vector
|
||||
existing.text = item["text"]
|
||||
existing.vmetadata = item["metadata"]
|
||||
existing.collection_name = (
|
||||
collection_name # Update collection_name if necessary
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
for item in items:
|
||||
vector = self.adjust_vector_length(item["vector"])
|
||||
# Use raw SQL for BYTEA/pgcrypto
|
||||
self.session.execute(
|
||||
text(
|
||||
"""
|
||||
INSERT INTO document_chunk
|
||||
(id, vector, collection_name, text, vmetadata)
|
||||
VALUES (
|
||||
:id, :vector, :collection_name,
|
||||
pgp_sym_encrypt(:text, :key),
|
||||
pgp_sym_encrypt(:metadata::text, :key)
|
||||
)
|
||||
ON CONFLICT (id) DO NOTHING
|
||||
"""
|
||||
),
|
||||
{
|
||||
"id": item["id"],
|
||||
"vector": vector,
|
||||
"collection_name": collection_name,
|
||||
"text": item["text"],
|
||||
"metadata": json.dumps(item["metadata"]),
|
||||
"key": PGVECTOR_PGCRYPTO_KEY,
|
||||
},
|
||||
)
|
||||
else:
|
||||
self.session.commit()
|
||||
log.info(f"Encrypted & inserted {len(items)} into '{collection_name}'")
|
||||
|
||||
else:
|
||||
new_items = []
|
||||
for item in items:
|
||||
vector = self.adjust_vector_length(item["vector"])
|
||||
new_chunk = DocumentChunk(
|
||||
id=item["id"],
|
||||
vector=vector,
|
||||
|
|
@ -192,11 +218,78 @@ class PgvectorClient(VectorDBBase):
|
|||
text=item["text"],
|
||||
vmetadata=item["metadata"],
|
||||
)
|
||||
self.session.add(new_chunk)
|
||||
self.session.commit()
|
||||
log.info(
|
||||
f"Upserted {len(items)} items into collection '{collection_name}'."
|
||||
)
|
||||
new_items.append(new_chunk)
|
||||
self.session.bulk_save_objects(new_items)
|
||||
self.session.commit()
|
||||
log.info(
|
||||
f"Inserted {len(new_items)} items into collection '{collection_name}'."
|
||||
)
|
||||
except Exception as e:
|
||||
self.session.rollback()
|
||||
log.exception(f"Error during insert: {e}")
|
||||
raise
|
||||
|
||||
def upsert(self, collection_name: str, items: List[VectorItem]) -> None:
|
||||
try:
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
for item in items:
|
||||
vector = self.adjust_vector_length(item["vector"])
|
||||
self.session.execute(
|
||||
text(
|
||||
"""
|
||||
INSERT INTO document_chunk
|
||||
(id, vector, collection_name, text, vmetadata)
|
||||
VALUES (
|
||||
:id, :vector, :collection_name,
|
||||
pgp_sym_encrypt(:text, :key),
|
||||
pgp_sym_encrypt(:metadata::text, :key)
|
||||
)
|
||||
ON CONFLICT (id) DO UPDATE SET
|
||||
vector = EXCLUDED.vector,
|
||||
collection_name = EXCLUDED.collection_name,
|
||||
text = EXCLUDED.text,
|
||||
vmetadata = EXCLUDED.vmetadata
|
||||
"""
|
||||
),
|
||||
{
|
||||
"id": item["id"],
|
||||
"vector": vector,
|
||||
"collection_name": collection_name,
|
||||
"text": item["text"],
|
||||
"metadata": json.dumps(item["metadata"]),
|
||||
"key": PGVECTOR_PGCRYPTO_KEY,
|
||||
},
|
||||
)
|
||||
self.session.commit()
|
||||
log.info(f"Encrypted & upserted {len(items)} into '{collection_name}'")
|
||||
else:
|
||||
for item in items:
|
||||
vector = self.adjust_vector_length(item["vector"])
|
||||
existing = (
|
||||
self.session.query(DocumentChunk)
|
||||
.filter(DocumentChunk.id == item["id"])
|
||||
.first()
|
||||
)
|
||||
if existing:
|
||||
existing.vector = vector
|
||||
existing.text = item["text"]
|
||||
existing.vmetadata = item["metadata"]
|
||||
existing.collection_name = (
|
||||
collection_name # Update collection_name if necessary
|
||||
)
|
||||
else:
|
||||
new_chunk = DocumentChunk(
|
||||
id=item["id"],
|
||||
vector=vector,
|
||||
collection_name=collection_name,
|
||||
text=item["text"],
|
||||
vmetadata=item["metadata"],
|
||||
)
|
||||
self.session.add(new_chunk)
|
||||
self.session.commit()
|
||||
log.info(
|
||||
f"Upserted {len(items)} items into collection '{collection_name}'."
|
||||
)
|
||||
except Exception as e:
|
||||
self.session.rollback()
|
||||
log.exception(f"Error during upsert: {e}")
|
||||
|
|
@ -230,16 +323,32 @@ class PgvectorClient(VectorDBBase):
|
|||
.alias("query_vectors")
|
||||
)
|
||||
|
||||
result_fields = [
|
||||
DocumentChunk.id,
|
||||
]
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
result_fields.append(
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.text, PGVECTOR_PGCRYPTO_KEY, Text
|
||||
).label("text")
|
||||
)
|
||||
result_fields.append(
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.vmetadata, PGVECTOR_PGCRYPTO_KEY, JSONB
|
||||
).label("vmetadata")
|
||||
)
|
||||
else:
|
||||
result_fields.append(DocumentChunk.text)
|
||||
result_fields.append(DocumentChunk.vmetadata)
|
||||
result_fields.append(
|
||||
(DocumentChunk.vector.cosine_distance(query_vectors.c.q_vector)).label(
|
||||
"distance"
|
||||
)
|
||||
)
|
||||
|
||||
# Build the lateral subquery for each query vector
|
||||
subq = (
|
||||
select(
|
||||
DocumentChunk.id,
|
||||
DocumentChunk.text,
|
||||
DocumentChunk.vmetadata,
|
||||
(
|
||||
DocumentChunk.vector.cosine_distance(query_vectors.c.q_vector)
|
||||
).label("distance"),
|
||||
)
|
||||
select(*result_fields)
|
||||
.where(DocumentChunk.collection_name == collection_name)
|
||||
.order_by(
|
||||
(DocumentChunk.vector.cosine_distance(query_vectors.c.q_vector))
|
||||
|
|
@ -299,17 +408,43 @@ class PgvectorClient(VectorDBBase):
|
|||
self, collection_name: str, filter: Dict[str, Any], limit: Optional[int] = None
|
||||
) -> Optional[GetResult]:
|
||||
try:
|
||||
query = self.session.query(DocumentChunk).filter(
|
||||
DocumentChunk.collection_name == collection_name
|
||||
)
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
# Build where clause for vmetadata filter
|
||||
where_clauses = [DocumentChunk.collection_name == collection_name]
|
||||
for key, value in filter.items():
|
||||
# decrypt then check key: JSON filter after decryption
|
||||
where_clauses.append(
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.vmetadata, PGVECTOR_PGCRYPTO_KEY, JSONB
|
||||
)[key].astext
|
||||
== str(value)
|
||||
)
|
||||
stmt = select(
|
||||
DocumentChunk.id,
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.text, PGVECTOR_PGCRYPTO_KEY, Text
|
||||
).label("text"),
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.vmetadata, PGVECTOR_PGCRYPTO_KEY, JSONB
|
||||
).label("vmetadata"),
|
||||
).where(*where_clauses)
|
||||
if limit is not None:
|
||||
stmt = stmt.limit(limit)
|
||||
results = self.session.execute(stmt).all()
|
||||
else:
|
||||
query = self.session.query(DocumentChunk).filter(
|
||||
DocumentChunk.collection_name == collection_name
|
||||
)
|
||||
|
||||
for key, value in filter.items():
|
||||
query = query.filter(DocumentChunk.vmetadata[key].astext == str(value))
|
||||
for key, value in filter.items():
|
||||
query = query.filter(
|
||||
DocumentChunk.vmetadata[key].astext == str(value)
|
||||
)
|
||||
|
||||
if limit is not None:
|
||||
query = query.limit(limit)
|
||||
if limit is not None:
|
||||
query = query.limit(limit)
|
||||
|
||||
results = query.all()
|
||||
results = query.all()
|
||||
|
||||
if not results:
|
||||
return None
|
||||
|
|
@ -331,20 +466,38 @@ class PgvectorClient(VectorDBBase):
|
|||
self, collection_name: str, limit: Optional[int] = None
|
||||
) -> Optional[GetResult]:
|
||||
try:
|
||||
query = self.session.query(DocumentChunk).filter(
|
||||
DocumentChunk.collection_name == collection_name
|
||||
)
|
||||
if limit is not None:
|
||||
query = query.limit(limit)
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
stmt = select(
|
||||
DocumentChunk.id,
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.text, PGVECTOR_PGCRYPTO_KEY, Text
|
||||
).label("text"),
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.vmetadata, PGVECTOR_PGCRYPTO_KEY, JSONB
|
||||
).label("vmetadata"),
|
||||
).where(DocumentChunk.collection_name == collection_name)
|
||||
if limit is not None:
|
||||
stmt = stmt.limit(limit)
|
||||
results = self.session.execute(stmt).all()
|
||||
ids = [[row.id for row in results]]
|
||||
documents = [[row.text for row in results]]
|
||||
metadatas = [[row.vmetadata for row in results]]
|
||||
else:
|
||||
|
||||
results = query.all()
|
||||
query = self.session.query(DocumentChunk).filter(
|
||||
DocumentChunk.collection_name == collection_name
|
||||
)
|
||||
if limit is not None:
|
||||
query = query.limit(limit)
|
||||
|
||||
if not results:
|
||||
return None
|
||||
results = query.all()
|
||||
|
||||
ids = [[result.id for result in results]]
|
||||
documents = [[result.text for result in results]]
|
||||
metadatas = [[result.vmetadata for result in results]]
|
||||
if not results:
|
||||
return None
|
||||
|
||||
ids = [[result.id for result in results]]
|
||||
documents = [[result.text for result in results]]
|
||||
metadatas = [[result.vmetadata for result in results]]
|
||||
|
||||
return GetResult(ids=ids, documents=documents, metadatas=metadatas)
|
||||
except Exception as e:
|
||||
|
|
@ -358,17 +511,33 @@ class PgvectorClient(VectorDBBase):
|
|||
filter: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
try:
|
||||
query = self.session.query(DocumentChunk).filter(
|
||||
DocumentChunk.collection_name == collection_name
|
||||
)
|
||||
if ids:
|
||||
query = query.filter(DocumentChunk.id.in_(ids))
|
||||
if filter:
|
||||
for key, value in filter.items():
|
||||
query = query.filter(
|
||||
DocumentChunk.vmetadata[key].astext == str(value)
|
||||
)
|
||||
deleted = query.delete(synchronize_session=False)
|
||||
if PGVECTOR_PGCRYPTO:
|
||||
wheres = [DocumentChunk.collection_name == collection_name]
|
||||
if ids:
|
||||
wheres.append(DocumentChunk.id.in_(ids))
|
||||
if filter:
|
||||
for key, value in filter.items():
|
||||
wheres.append(
|
||||
pgcrypto_decrypt(
|
||||
DocumentChunk.vmetadata, PGVECTOR_PGCRYPTO_KEY, JSONB
|
||||
)[key].astext
|
||||
== str(value)
|
||||
)
|
||||
stmt = DocumentChunk.__table__.delete().where(*wheres)
|
||||
result = self.session.execute(stmt)
|
||||
deleted = result.rowcount
|
||||
else:
|
||||
query = self.session.query(DocumentChunk).filter(
|
||||
DocumentChunk.collection_name == collection_name
|
||||
)
|
||||
if ids:
|
||||
query = query.filter(DocumentChunk.id.in_(ids))
|
||||
if filter:
|
||||
for key, value in filter.items():
|
||||
query = query.filter(
|
||||
DocumentChunk.vmetadata[key].astext == str(value)
|
||||
)
|
||||
deleted = query.delete(synchronize_session=False)
|
||||
self.session.commit()
|
||||
log.info(f"Deleted {deleted} items from collection '{collection_name}'.")
|
||||
except Exception as e:
|
||||
|
|
|
|||
|
|
@ -3,10 +3,19 @@ import logging
|
|||
import time # for measuring elapsed time
|
||||
from pinecone import Pinecone, ServerlessSpec
|
||||
|
||||
# Add gRPC support for better performance (Pinecone best practice)
|
||||
try:
|
||||
from pinecone.grpc import PineconeGRPC
|
||||
|
||||
GRPC_AVAILABLE = True
|
||||
except ImportError:
|
||||
GRPC_AVAILABLE = False
|
||||
|
||||
import asyncio # for async upserts
|
||||
import functools # for partial binding in async tasks
|
||||
|
||||
import concurrent.futures # for parallel batch upserts
|
||||
import random # for jitter in retry backoff
|
||||
|
||||
from open_webui.retrieval.vector.main import (
|
||||
VectorDBBase,
|
||||
|
|
@ -47,7 +56,24 @@ class PineconeClient(VectorDBBase):
|
|||
self.cloud = PINECONE_CLOUD
|
||||
|
||||
# Initialize Pinecone client for improved performance
|
||||
self.client = Pinecone(api_key=self.api_key)
|
||||
if GRPC_AVAILABLE:
|
||||
# Use gRPC client for better performance (Pinecone recommendation)
|
||||
self.client = PineconeGRPC(
|
||||
api_key=self.api_key,
|
||||
pool_threads=20, # Improved connection pool size
|
||||
timeout=30, # Reasonable timeout for operations
|
||||
)
|
||||
self.using_grpc = True
|
||||
log.info("Using Pinecone gRPC client for optimal performance")
|
||||
else:
|
||||
# Fallback to HTTP client with enhanced connection pooling
|
||||
self.client = Pinecone(
|
||||
api_key=self.api_key,
|
||||
pool_threads=20, # Improved connection pool size
|
||||
timeout=30, # Reasonable timeout for operations
|
||||
)
|
||||
self.using_grpc = False
|
||||
log.info("Using Pinecone HTTP client (gRPC not available)")
|
||||
|
||||
# Persistent executor for batch operations
|
||||
self._executor = concurrent.futures.ThreadPoolExecutor(max_workers=5)
|
||||
|
|
@ -91,12 +117,53 @@ class PineconeClient(VectorDBBase):
|
|||
log.info(f"Using existing Pinecone index '{self.index_name}'")
|
||||
|
||||
# Connect to the index
|
||||
self.index = self.client.Index(self.index_name)
|
||||
self.index = self.client.Index(
|
||||
self.index_name,
|
||||
pool_threads=20, # Enhanced connection pool for index operations
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
log.error(f"Failed to initialize Pinecone index: {e}")
|
||||
raise RuntimeError(f"Failed to initialize Pinecone index: {e}")
|
||||
|
||||
def _retry_pinecone_operation(self, operation_func, max_retries=3):
|
||||
"""Retry Pinecone operations with exponential backoff for rate limits and network issues."""
|
||||
for attempt in range(max_retries):
|
||||
try:
|
||||
return operation_func()
|
||||
except Exception as e:
|
||||
error_str = str(e).lower()
|
||||
# Check if it's a retryable error (rate limits, network issues, timeouts)
|
||||
is_retryable = any(
|
||||
keyword in error_str
|
||||
for keyword in [
|
||||
"rate limit",
|
||||
"quota",
|
||||
"timeout",
|
||||
"network",
|
||||
"connection",
|
||||
"unavailable",
|
||||
"internal error",
|
||||
"429",
|
||||
"500",
|
||||
"502",
|
||||
"503",
|
||||
"504",
|
||||
]
|
||||
)
|
||||
|
||||
if not is_retryable or attempt == max_retries - 1:
|
||||
# Don't retry for non-retryable errors or on final attempt
|
||||
raise
|
||||
|
||||
# Exponential backoff with jitter
|
||||
delay = (2**attempt) + random.uniform(0, 1)
|
||||
log.warning(
|
||||
f"Pinecone operation failed (attempt {attempt + 1}/{max_retries}), "
|
||||
f"retrying in {delay:.2f}s: {e}"
|
||||
)
|
||||
time.sleep(delay)
|
||||
|
||||
def _create_points(
|
||||
self, items: List[VectorItem], collection_name_with_prefix: str
|
||||
) -> List[Dict[str, Any]]:
|
||||
|
|
@ -223,7 +290,8 @@ class PineconeClient(VectorDBBase):
|
|||
elapsed = time.time() - start_time
|
||||
log.debug(f"Insert of {len(points)} vectors took {elapsed:.2f} seconds")
|
||||
log.info(
|
||||
f"Successfully inserted {len(points)} vectors in parallel batches into '{collection_name_with_prefix}'"
|
||||
f"Successfully inserted {len(points)} vectors in parallel batches "
|
||||
f"into '{collection_name_with_prefix}'"
|
||||
)
|
||||
|
||||
def upsert(self, collection_name: str, items: List[VectorItem]) -> None:
|
||||
|
|
@ -254,7 +322,8 @@ class PineconeClient(VectorDBBase):
|
|||
elapsed = time.time() - start_time
|
||||
log.debug(f"Upsert of {len(points)} vectors took {elapsed:.2f} seconds")
|
||||
log.info(
|
||||
f"Successfully upserted {len(points)} vectors in parallel batches into '{collection_name_with_prefix}'"
|
||||
f"Successfully upserted {len(points)} vectors in parallel batches "
|
||||
f"into '{collection_name_with_prefix}'"
|
||||
)
|
||||
|
||||
async def insert_async(self, collection_name: str, items: List[VectorItem]) -> None:
|
||||
|
|
@ -285,7 +354,8 @@ class PineconeClient(VectorDBBase):
|
|||
log.error(f"Error in async insert batch: {result}")
|
||||
raise result
|
||||
log.info(
|
||||
f"Successfully async inserted {len(points)} vectors in batches into '{collection_name_with_prefix}'"
|
||||
f"Successfully async inserted {len(points)} vectors in batches "
|
||||
f"into '{collection_name_with_prefix}'"
|
||||
)
|
||||
|
||||
async def upsert_async(self, collection_name: str, items: List[VectorItem]) -> None:
|
||||
|
|
@ -316,7 +386,8 @@ class PineconeClient(VectorDBBase):
|
|||
log.error(f"Error in async upsert batch: {result}")
|
||||
raise result
|
||||
log.info(
|
||||
f"Successfully async upserted {len(points)} vectors in batches into '{collection_name_with_prefix}'"
|
||||
f"Successfully async upserted {len(points)} vectors in batches "
|
||||
f"into '{collection_name_with_prefix}'"
|
||||
)
|
||||
|
||||
def search(
|
||||
|
|
@ -457,10 +528,12 @@ class PineconeClient(VectorDBBase):
|
|||
# This is a limitation of Pinecone - be careful with ID uniqueness
|
||||
self.index.delete(ids=batch_ids)
|
||||
log.debug(
|
||||
f"Deleted batch of {len(batch_ids)} vectors by ID from '{collection_name_with_prefix}'"
|
||||
f"Deleted batch of {len(batch_ids)} vectors by ID "
|
||||
f"from '{collection_name_with_prefix}'"
|
||||
)
|
||||
log.info(
|
||||
f"Successfully deleted {len(ids)} vectors by ID from '{collection_name_with_prefix}'"
|
||||
f"Successfully deleted {len(ids)} vectors by ID "
|
||||
f"from '{collection_name_with_prefix}'"
|
||||
)
|
||||
|
||||
elif filter:
|
||||
|
|
|
|||
|
|
@ -1,10 +1,20 @@
|
|||
import logging
|
||||
from typing import Optional, List
|
||||
from typing import Optional, Literal
|
||||
import requests
|
||||
|
||||
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
|
||||
from open_webui.env import SRC_LOG_LEVELS
|
||||
|
||||
MODELS = Literal[
|
||||
"sonar",
|
||||
"sonar-pro",
|
||||
"sonar-reasoning",
|
||||
"sonar-reasoning-pro",
|
||||
"sonar-deep-research",
|
||||
]
|
||||
SEARCH_CONTEXT_USAGE_LEVELS = Literal["low", "medium", "high"]
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(SRC_LOG_LEVELS["RAG"])
|
||||
|
||||
|
|
@ -14,6 +24,8 @@ def search_perplexity(
|
|||
query: str,
|
||||
count: int,
|
||||
filter_list: Optional[list[str]] = None,
|
||||
model: MODELS = "sonar",
|
||||
search_context_usage: SEARCH_CONTEXT_USAGE_LEVELS = "medium",
|
||||
) -> list[SearchResult]:
|
||||
"""Search using Perplexity API and return the results as a list of SearchResult objects.
|
||||
|
||||
|
|
@ -21,6 +33,9 @@ def search_perplexity(
|
|||
api_key (str): A Perplexity API key
|
||||
query (str): The query to search for
|
||||
count (int): Maximum number of results to return
|
||||
filter_list (Optional[list[str]]): List of domains to filter results
|
||||
model (str): The Perplexity model to use (sonar, sonar-pro)
|
||||
search_context_usage (str): Search context usage level (low, medium, high)
|
||||
|
||||
"""
|
||||
|
||||
|
|
@ -33,7 +48,7 @@ def search_perplexity(
|
|||
|
||||
# Create payload for the API call
|
||||
payload = {
|
||||
"model": "sonar",
|
||||
"model": model,
|
||||
"messages": [
|
||||
{
|
||||
"role": "system",
|
||||
|
|
@ -43,6 +58,9 @@ def search_perplexity(
|
|||
],
|
||||
"temperature": 0.2, # Lower temperature for more factual responses
|
||||
"stream": False,
|
||||
"web_search_options": {
|
||||
"search_context_usage": search_context_usage,
|
||||
},
|
||||
}
|
||||
|
||||
headers = {
|
||||
|
|
|
|||
|
|
@ -420,7 +420,7 @@ def load_b64_image_data(b64_str):
|
|||
try:
|
||||
if "," in b64_str:
|
||||
header, encoded = b64_str.split(",", 1)
|
||||
mime_type = header.split(";")[0]
|
||||
mime_type = header.split(";")[0].lstrip("data:")
|
||||
img_data = base64.b64decode(encoded)
|
||||
else:
|
||||
mime_type = "image/png"
|
||||
|
|
@ -428,7 +428,7 @@ def load_b64_image_data(b64_str):
|
|||
return img_data, mime_type
|
||||
except Exception as e:
|
||||
log.exception(f"Error loading image data: {e}")
|
||||
return None
|
||||
return None, None
|
||||
|
||||
|
||||
def load_url_image_data(url, headers=None):
|
||||
|
|
|
|||
|
|
@ -124,9 +124,8 @@ async def get_note_by_id(request: Request, id: str, user=Depends(get_verified_us
|
|||
status_code=status.HTTP_404_NOT_FOUND, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
if (
|
||||
user.role != "admin"
|
||||
and user.id != note.user_id
|
||||
if user.role != "admin" or (
|
||||
user.id != note.user_id
|
||||
and not has_access(user.id, type="read", access_control=note.access_control)
|
||||
):
|
||||
raise HTTPException(
|
||||
|
|
@ -159,9 +158,8 @@ async def update_note_by_id(
|
|||
status_code=status.HTTP_404_NOT_FOUND, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
if (
|
||||
user.role != "admin"
|
||||
and user.id != note.user_id
|
||||
if user.role != "admin" or (
|
||||
user.id != note.user_id
|
||||
and not has_access(user.id, type="write", access_control=note.access_control)
|
||||
):
|
||||
raise HTTPException(
|
||||
|
|
@ -199,9 +197,8 @@ async def delete_note_by_id(request: Request, id: str, user=Depends(get_verified
|
|||
status_code=status.HTTP_404_NOT_FOUND, detail=ERROR_MESSAGES.NOT_FOUND
|
||||
)
|
||||
|
||||
if (
|
||||
user.role != "admin"
|
||||
and user.id != note.user_id
|
||||
if user.role != "admin" or (
|
||||
user.id != note.user_id
|
||||
and not has_access(user.id, type="write", access_control=note.access_control)
|
||||
):
|
||||
raise HTTPException(
|
||||
|
|
|
|||
|
|
@ -1232,6 +1232,9 @@ class GenerateChatCompletionForm(BaseModel):
|
|||
stream: Optional[bool] = True
|
||||
keep_alive: Optional[Union[int, str]] = None
|
||||
tools: Optional[list[dict]] = None
|
||||
model_config = ConfigDict(
|
||||
extra="allow",
|
||||
)
|
||||
|
||||
|
||||
async def get_ollama_url(request: Request, model: str, url_idx: Optional[int] = None):
|
||||
|
|
@ -1269,7 +1272,9 @@ async def generate_chat_completion(
|
|||
detail=str(e),
|
||||
)
|
||||
|
||||
payload = {**form_data.model_dump(exclude_none=True)}
|
||||
if isinstance(form_data, BaseModel):
|
||||
payload = {**form_data.model_dump(exclude_none=True)}
|
||||
|
||||
if "metadata" in payload:
|
||||
del payload["metadata"]
|
||||
|
||||
|
|
@ -1323,7 +1328,7 @@ async def generate_chat_completion(
|
|||
prefix_id = api_config.get("prefix_id", None)
|
||||
if prefix_id:
|
||||
payload["model"] = payload["model"].replace(f"{prefix_id}.", "")
|
||||
# payload["keep_alive"] = -1 # keep alive forever
|
||||
|
||||
return await send_post_request(
|
||||
url=f"{url}/api/chat",
|
||||
payload=json.dumps(payload),
|
||||
|
|
|
|||
|
|
@ -887,6 +887,88 @@ async def generate_chat_completion(
|
|||
await session.close()
|
||||
|
||||
|
||||
async def embeddings(request: Request, form_data: dict, user):
|
||||
"""
|
||||
Calls the embeddings endpoint for OpenAI-compatible providers.
|
||||
|
||||
Args:
|
||||
request (Request): The FastAPI request context.
|
||||
form_data (dict): OpenAI-compatible embeddings payload.
|
||||
user (UserModel): The authenticated user.
|
||||
|
||||
Returns:
|
||||
dict: OpenAI-compatible embeddings response.
|
||||
"""
|
||||
idx = 0
|
||||
# Prepare payload/body
|
||||
body = json.dumps(form_data)
|
||||
# Find correct backend url/key based on model
|
||||
await get_all_models(request, user=user)
|
||||
model_id = form_data.get("model")
|
||||
models = request.app.state.OPENAI_MODELS
|
||||
if model_id in models:
|
||||
idx = models[model_id]["urlIdx"]
|
||||
url = request.app.state.config.OPENAI_API_BASE_URLS[idx]
|
||||
key = request.app.state.config.OPENAI_API_KEYS[idx]
|
||||
r = None
|
||||
session = None
|
||||
streaming = False
|
||||
try:
|
||||
session = aiohttp.ClientSession(trust_env=True)
|
||||
r = await session.request(
|
||||
method="POST",
|
||||
url=f"{url}/embeddings",
|
||||
data=body,
|
||||
headers={
|
||||
"Authorization": f"Bearer {key}",
|
||||
"Content-Type": "application/json",
|
||||
**(
|
||||
{
|
||||
"X-OpenWebUI-User-Name": user.name,
|
||||
"X-OpenWebUI-User-Id": user.id,
|
||||
"X-OpenWebUI-User-Email": user.email,
|
||||
"X-OpenWebUI-User-Role": user.role,
|
||||
}
|
||||
if ENABLE_FORWARD_USER_INFO_HEADERS and user
|
||||
else {}
|
||||
),
|
||||
},
|
||||
)
|
||||
r.raise_for_status()
|
||||
if "text/event-stream" in r.headers.get("Content-Type", ""):
|
||||
streaming = True
|
||||
return StreamingResponse(
|
||||
r.content,
|
||||
status_code=r.status,
|
||||
headers=dict(r.headers),
|
||||
background=BackgroundTask(
|
||||
cleanup_response, response=r, session=session
|
||||
),
|
||||
)
|
||||
else:
|
||||
response_data = await r.json()
|
||||
return response_data
|
||||
except Exception as e:
|
||||
log.exception(e)
|
||||
detail = None
|
||||
if r is not None:
|
||||
try:
|
||||
res = await r.json()
|
||||
if "error" in res:
|
||||
detail = f"External: {res['error']['message'] if 'message' in res['error'] else res['error']}"
|
||||
except Exception:
|
||||
detail = f"External: {e}"
|
||||
raise HTTPException(
|
||||
status_code=r.status if r else 500,
|
||||
detail=detail if detail else "Open WebUI: Server Connection Error",
|
||||
)
|
||||
finally:
|
||||
if not streaming and session:
|
||||
if r:
|
||||
r.close()
|
||||
await session.close()
|
||||
|
||||
|
||||
@router.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
|
||||
async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
|
||||
"""
|
||||
|
|
|
|||
|
|
@ -414,6 +414,9 @@ async def get_rag_config(request: Request, user=Depends(get_admin_user)):
|
|||
"DOCLING_OCR_ENGINE": request.app.state.config.DOCLING_OCR_ENGINE,
|
||||
"DOCLING_OCR_LANG": request.app.state.config.DOCLING_OCR_LANG,
|
||||
"DOCLING_DO_PICTURE_DESCRIPTION": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION,
|
||||
"DOCLING_PICTURE_DESCRIPTION_MODE": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE,
|
||||
"DOCLING_PICTURE_DESCRIPTION_LOCAL": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL,
|
||||
"DOCLING_PICTURE_DESCRIPTION_API": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API,
|
||||
"DOCUMENT_INTELLIGENCE_ENDPOINT": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
|
||||
"DOCUMENT_INTELLIGENCE_KEY": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
|
||||
"MISTRAL_OCR_API_KEY": request.app.state.config.MISTRAL_OCR_API_KEY,
|
||||
|
|
@ -467,6 +470,8 @@ async def get_rag_config(request: Request, user=Depends(get_admin_user)):
|
|||
"BING_SEARCH_V7_SUBSCRIPTION_KEY": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
|
||||
"EXA_API_KEY": request.app.state.config.EXA_API_KEY,
|
||||
"PERPLEXITY_API_KEY": request.app.state.config.PERPLEXITY_API_KEY,
|
||||
"PERPLEXITY_MODEL": request.app.state.config.PERPLEXITY_MODEL,
|
||||
"PERPLEXITY_SEARCH_CONTEXT_USAGE": request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE,
|
||||
"SOUGOU_API_SID": request.app.state.config.SOUGOU_API_SID,
|
||||
"SOUGOU_API_SK": request.app.state.config.SOUGOU_API_SK,
|
||||
"WEB_LOADER_ENGINE": request.app.state.config.WEB_LOADER_ENGINE,
|
||||
|
|
@ -520,6 +525,8 @@ class WebConfig(BaseModel):
|
|||
BING_SEARCH_V7_SUBSCRIPTION_KEY: Optional[str] = None
|
||||
EXA_API_KEY: Optional[str] = None
|
||||
PERPLEXITY_API_KEY: Optional[str] = None
|
||||
PERPLEXITY_MODEL: Optional[str] = None
|
||||
PERPLEXITY_SEARCH_CONTEXT_USAGE: Optional[str] = None
|
||||
SOUGOU_API_SID: Optional[str] = None
|
||||
SOUGOU_API_SK: Optional[str] = None
|
||||
WEB_LOADER_ENGINE: Optional[str] = None
|
||||
|
|
@ -571,6 +578,9 @@ class ConfigForm(BaseModel):
|
|||
DOCLING_OCR_ENGINE: Optional[str] = None
|
||||
DOCLING_OCR_LANG: Optional[str] = None
|
||||
DOCLING_DO_PICTURE_DESCRIPTION: Optional[bool] = None
|
||||
DOCLING_PICTURE_DESCRIPTION_MODE: Optional[str] = None
|
||||
DOCLING_PICTURE_DESCRIPTION_LOCAL: Optional[dict] = None
|
||||
DOCLING_PICTURE_DESCRIPTION_API: Optional[dict] = None
|
||||
DOCUMENT_INTELLIGENCE_ENDPOINT: Optional[str] = None
|
||||
DOCUMENT_INTELLIGENCE_KEY: Optional[str] = None
|
||||
MISTRAL_OCR_API_KEY: Optional[str] = None
|
||||
|
|
@ -744,6 +754,22 @@ async def update_rag_config(
|
|||
else request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION
|
||||
)
|
||||
|
||||
request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE = (
|
||||
form_data.DOCLING_PICTURE_DESCRIPTION_MODE
|
||||
if form_data.DOCLING_PICTURE_DESCRIPTION_MODE is not None
|
||||
else request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE
|
||||
)
|
||||
request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL = (
|
||||
form_data.DOCLING_PICTURE_DESCRIPTION_LOCAL
|
||||
if form_data.DOCLING_PICTURE_DESCRIPTION_LOCAL is not None
|
||||
else request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL
|
||||
)
|
||||
request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API = (
|
||||
form_data.DOCLING_PICTURE_DESCRIPTION_API
|
||||
if form_data.DOCLING_PICTURE_DESCRIPTION_API is not None
|
||||
else request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API
|
||||
)
|
||||
|
||||
request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT = (
|
||||
form_data.DOCUMENT_INTELLIGENCE_ENDPOINT
|
||||
if form_data.DOCUMENT_INTELLIGENCE_ENDPOINT is not None
|
||||
|
|
@ -907,6 +933,10 @@ async def update_rag_config(
|
|||
)
|
||||
request.app.state.config.EXA_API_KEY = form_data.web.EXA_API_KEY
|
||||
request.app.state.config.PERPLEXITY_API_KEY = form_data.web.PERPLEXITY_API_KEY
|
||||
request.app.state.config.PERPLEXITY_MODEL = form_data.web.PERPLEXITY_MODEL
|
||||
request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE = (
|
||||
form_data.web.PERPLEXITY_SEARCH_CONTEXT_USAGE
|
||||
)
|
||||
request.app.state.config.SOUGOU_API_SID = form_data.web.SOUGOU_API_SID
|
||||
request.app.state.config.SOUGOU_API_SK = form_data.web.SOUGOU_API_SK
|
||||
|
||||
|
|
@ -977,6 +1007,9 @@ async def update_rag_config(
|
|||
"DOCLING_OCR_ENGINE": request.app.state.config.DOCLING_OCR_ENGINE,
|
||||
"DOCLING_OCR_LANG": request.app.state.config.DOCLING_OCR_LANG,
|
||||
"DOCLING_DO_PICTURE_DESCRIPTION": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION,
|
||||
"DOCLING_PICTURE_DESCRIPTION_MODE": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE,
|
||||
"DOCLING_PICTURE_DESCRIPTION_LOCAL": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL,
|
||||
"DOCLING_PICTURE_DESCRIPTION_API": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API,
|
||||
"DOCUMENT_INTELLIGENCE_ENDPOINT": request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
|
||||
"DOCUMENT_INTELLIGENCE_KEY": request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
|
||||
"MISTRAL_OCR_API_KEY": request.app.state.config.MISTRAL_OCR_API_KEY,
|
||||
|
|
@ -1030,6 +1063,8 @@ async def update_rag_config(
|
|||
"BING_SEARCH_V7_SUBSCRIPTION_KEY": request.app.state.config.BING_SEARCH_V7_SUBSCRIPTION_KEY,
|
||||
"EXA_API_KEY": request.app.state.config.EXA_API_KEY,
|
||||
"PERPLEXITY_API_KEY": request.app.state.config.PERPLEXITY_API_KEY,
|
||||
"PERPLEXITY_MODEL": request.app.state.config.PERPLEXITY_MODEL,
|
||||
"PERPLEXITY_SEARCH_CONTEXT_USAGE": request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE,
|
||||
"SOUGOU_API_SID": request.app.state.config.SOUGOU_API_SID,
|
||||
"SOUGOU_API_SK": request.app.state.config.SOUGOU_API_SK,
|
||||
"WEB_LOADER_ENGINE": request.app.state.config.WEB_LOADER_ENGINE,
|
||||
|
|
@ -1321,9 +1356,14 @@ def process_file(
|
|||
EXTERNAL_DOCUMENT_LOADER_API_KEY=request.app.state.config.EXTERNAL_DOCUMENT_LOADER_API_KEY,
|
||||
TIKA_SERVER_URL=request.app.state.config.TIKA_SERVER_URL,
|
||||
DOCLING_SERVER_URL=request.app.state.config.DOCLING_SERVER_URL,
|
||||
DOCLING_OCR_ENGINE=request.app.state.config.DOCLING_OCR_ENGINE,
|
||||
DOCLING_OCR_LANG=request.app.state.config.DOCLING_OCR_LANG,
|
||||
DOCLING_DO_PICTURE_DESCRIPTION=request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION,
|
||||
DOCLING_PARAMS={
|
||||
"ocr_engine": request.app.state.config.DOCLING_OCR_ENGINE,
|
||||
"ocr_lang": request.app.state.config.DOCLING_OCR_LANG,
|
||||
"do_picture_description": request.app.state.config.DOCLING_DO_PICTURE_DESCRIPTION,
|
||||
"picture_description_mode": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_MODE,
|
||||
"picture_description_local": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_LOCAL,
|
||||
"picture_description_api": request.app.state.config.DOCLING_PICTURE_DESCRIPTION_API,
|
||||
},
|
||||
PDF_EXTRACT_IMAGES=request.app.state.config.PDF_EXTRACT_IMAGES,
|
||||
DOCUMENT_INTELLIGENCE_ENDPOINT=request.app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT,
|
||||
DOCUMENT_INTELLIGENCE_KEY=request.app.state.config.DOCUMENT_INTELLIGENCE_KEY,
|
||||
|
|
@ -1740,19 +1780,14 @@ def search_web(request: Request, engine: str, query: str) -> list[SearchResult]:
|
|||
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
|
||||
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
|
||||
)
|
||||
elif engine == "exa":
|
||||
return search_exa(
|
||||
request.app.state.config.EXA_API_KEY,
|
||||
query,
|
||||
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
|
||||
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
|
||||
)
|
||||
elif engine == "perplexity":
|
||||
return search_perplexity(
|
||||
request.app.state.config.PERPLEXITY_API_KEY,
|
||||
query,
|
||||
request.app.state.config.WEB_SEARCH_RESULT_COUNT,
|
||||
request.app.state.config.WEB_SEARCH_DOMAIN_FILTER_LIST,
|
||||
model=request.app.state.config.PERPLEXITY_MODEL,
|
||||
search_context_usage=request.app.state.config.PERPLEXITY_SEARCH_CONTEXT_USAGE,
|
||||
)
|
||||
elif engine == "sougou":
|
||||
if (
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@ import re
|
|||
from open_webui.utils.chat import generate_chat_completion
|
||||
from open_webui.utils.task import (
|
||||
title_generation_template,
|
||||
follow_up_generation_template,
|
||||
query_generation_template,
|
||||
image_prompt_generation_template,
|
||||
autocomplete_generation_template,
|
||||
|
|
@ -25,6 +26,7 @@ from open_webui.utils.task import get_task_model_id
|
|||
|
||||
from open_webui.config import (
|
||||
DEFAULT_TITLE_GENERATION_PROMPT_TEMPLATE,
|
||||
DEFAULT_FOLLOW_UP_GENERATION_PROMPT_TEMPLATE,
|
||||
DEFAULT_TAGS_GENERATION_PROMPT_TEMPLATE,
|
||||
DEFAULT_IMAGE_PROMPT_GENERATION_PROMPT_TEMPLATE,
|
||||
DEFAULT_QUERY_GENERATION_PROMPT_TEMPLATE,
|
||||
|
|
@ -58,6 +60,8 @@ async def get_task_config(request: Request, user=Depends(get_verified_user)):
|
|||
"ENABLE_AUTOCOMPLETE_GENERATION": request.app.state.config.ENABLE_AUTOCOMPLETE_GENERATION,
|
||||
"AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH": request.app.state.config.AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH,
|
||||
"TAGS_GENERATION_PROMPT_TEMPLATE": request.app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE,
|
||||
"FOLLOW_UP_GENERATION_PROMPT_TEMPLATE": request.app.state.config.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE,
|
||||
"ENABLE_FOLLOW_UP_GENERATION": request.app.state.config.ENABLE_FOLLOW_UP_GENERATION,
|
||||
"ENABLE_TAGS_GENERATION": request.app.state.config.ENABLE_TAGS_GENERATION,
|
||||
"ENABLE_TITLE_GENERATION": request.app.state.config.ENABLE_TITLE_GENERATION,
|
||||
"ENABLE_SEARCH_QUERY_GENERATION": request.app.state.config.ENABLE_SEARCH_QUERY_GENERATION,
|
||||
|
|
@ -76,6 +80,8 @@ class TaskConfigForm(BaseModel):
|
|||
ENABLE_AUTOCOMPLETE_GENERATION: bool
|
||||
AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH: int
|
||||
TAGS_GENERATION_PROMPT_TEMPLATE: str
|
||||
FOLLOW_UP_GENERATION_PROMPT_TEMPLATE: str
|
||||
ENABLE_FOLLOW_UP_GENERATION: bool
|
||||
ENABLE_TAGS_GENERATION: bool
|
||||
ENABLE_SEARCH_QUERY_GENERATION: bool
|
||||
ENABLE_RETRIEVAL_QUERY_GENERATION: bool
|
||||
|
|
@ -94,6 +100,13 @@ async def update_task_config(
|
|||
form_data.TITLE_GENERATION_PROMPT_TEMPLATE
|
||||
)
|
||||
|
||||
request.app.state.config.ENABLE_FOLLOW_UP_GENERATION = (
|
||||
form_data.ENABLE_FOLLOW_UP_GENERATION
|
||||
)
|
||||
request.app.state.config.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE = (
|
||||
form_data.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE
|
||||
)
|
||||
|
||||
request.app.state.config.IMAGE_PROMPT_GENERATION_PROMPT_TEMPLATE = (
|
||||
form_data.IMAGE_PROMPT_GENERATION_PROMPT_TEMPLATE
|
||||
)
|
||||
|
|
@ -133,6 +146,8 @@ async def update_task_config(
|
|||
"AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH": request.app.state.config.AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH,
|
||||
"TAGS_GENERATION_PROMPT_TEMPLATE": request.app.state.config.TAGS_GENERATION_PROMPT_TEMPLATE,
|
||||
"ENABLE_TAGS_GENERATION": request.app.state.config.ENABLE_TAGS_GENERATION,
|
||||
"ENABLE_FOLLOW_UP_GENERATION": request.app.state.config.ENABLE_FOLLOW_UP_GENERATION,
|
||||
"FOLLOW_UP_GENERATION_PROMPT_TEMPLATE": request.app.state.config.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE,
|
||||
"ENABLE_SEARCH_QUERY_GENERATION": request.app.state.config.ENABLE_SEARCH_QUERY_GENERATION,
|
||||
"ENABLE_RETRIEVAL_QUERY_GENERATION": request.app.state.config.ENABLE_RETRIEVAL_QUERY_GENERATION,
|
||||
"QUERY_GENERATION_PROMPT_TEMPLATE": request.app.state.config.QUERY_GENERATION_PROMPT_TEMPLATE,
|
||||
|
|
@ -231,6 +246,86 @@ async def generate_title(
|
|||
)
|
||||
|
||||
|
||||
@router.post("/follow_up/completions")
|
||||
async def generate_follow_ups(
|
||||
request: Request, form_data: dict, user=Depends(get_verified_user)
|
||||
):
|
||||
|
||||
if not request.app.state.config.ENABLE_FOLLOW_UP_GENERATION:
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_200_OK,
|
||||
content={"detail": "Follow-up generation is disabled"},
|
||||
)
|
||||
|
||||
if getattr(request.state, "direct", False) and hasattr(request.state, "model"):
|
||||
models = {
|
||||
request.state.model["id"]: request.state.model,
|
||||
}
|
||||
else:
|
||||
models = request.app.state.MODELS
|
||||
|
||||
model_id = form_data["model"]
|
||||
if model_id not in models:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Model not found",
|
||||
)
|
||||
|
||||
# Check if the user has a custom task model
|
||||
# If the user has a custom task model, use that model
|
||||
task_model_id = get_task_model_id(
|
||||
model_id,
|
||||
request.app.state.config.TASK_MODEL,
|
||||
request.app.state.config.TASK_MODEL_EXTERNAL,
|
||||
models,
|
||||
)
|
||||
|
||||
log.debug(
|
||||
f"generating chat title using model {task_model_id} for user {user.email} "
|
||||
)
|
||||
|
||||
if request.app.state.config.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE != "":
|
||||
template = request.app.state.config.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE
|
||||
else:
|
||||
template = DEFAULT_FOLLOW_UP_GENERATION_PROMPT_TEMPLATE
|
||||
|
||||
content = follow_up_generation_template(
|
||||
template,
|
||||
form_data["messages"],
|
||||
{
|
||||
"name": user.name,
|
||||
"location": user.info.get("location") if user.info else None,
|
||||
},
|
||||
)
|
||||
|
||||
payload = {
|
||||
"model": task_model_id,
|
||||
"messages": [{"role": "user", "content": content}],
|
||||
"stream": False,
|
||||
"metadata": {
|
||||
**(request.state.metadata if hasattr(request.state, "metadata") else {}),
|
||||
"task": str(TASKS.FOLLOW_UP_GENERATION),
|
||||
"task_body": form_data,
|
||||
"chat_id": form_data.get("chat_id", None),
|
||||
},
|
||||
}
|
||||
|
||||
# Process the payload through the pipeline
|
||||
try:
|
||||
payload = await process_pipeline_inlet_filter(request, payload, user, models)
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
try:
|
||||
return await generate_chat_completion(request, form_data=payload, user=user)
|
||||
except Exception as e:
|
||||
log.error("Exception occurred", exc_info=True)
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
content={"detail": "An internal error has occurred."},
|
||||
)
|
||||
|
||||
|
||||
@router.post("/tags/completions")
|
||||
async def generate_chat_tags(
|
||||
request: Request, form_data: dict, user=Depends(get_verified_user)
|
||||
|
|
|
|||
|
|
@ -165,22 +165,6 @@ async def update_default_user_permissions(
|
|||
return request.app.state.config.USER_PERMISSIONS
|
||||
|
||||
|
||||
############################
|
||||
# UpdateUserRole
|
||||
############################
|
||||
|
||||
|
||||
@router.post("/update/role", response_model=Optional[UserModel])
|
||||
async def update_user_role(form_data: UserRoleUpdateForm, user=Depends(get_admin_user)):
|
||||
if user.id != form_data.id and form_data.id != Users.get_first_user().id:
|
||||
return Users.update_user_role_by_id(form_data.id, form_data.role)
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
|
||||
|
||||
############################
|
||||
# GetUserSettingsBySessionUser
|
||||
############################
|
||||
|
|
@ -333,11 +317,22 @@ async def update_user_by_id(
|
|||
# Prevent modification of the primary admin user by other admins
|
||||
try:
|
||||
first_user = Users.get_first_user()
|
||||
if first_user and user_id == first_user.id and session_user.id != user_id:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
if first_user:
|
||||
if user_id == first_user.id:
|
||||
if session_user.id != user_id:
|
||||
# If the user trying to update is the primary admin, and they are not the primary admin themselves
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
|
||||
if form_data.role != "admin":
|
||||
# If the primary admin is trying to change their own role, prevent it
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail=ERROR_MESSAGES.ACTION_PROHIBITED,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
log.error(f"Error checking primary admin status: {e}")
|
||||
raise HTTPException(
|
||||
|
|
@ -365,6 +360,7 @@ async def update_user_by_id(
|
|||
updated_user = Users.update_user_by_id(
|
||||
user_id,
|
||||
{
|
||||
"role": form_data.role,
|
||||
"name": form_data.name,
|
||||
"email": form_data.email.lower(),
|
||||
"profile_image_url": form_data.profile_image_url,
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ class CodeForm(BaseModel):
|
|||
|
||||
|
||||
@router.post("/code/format")
|
||||
async def format_code(form_data: CodeForm, user=Depends(get_verified_user)):
|
||||
async def format_code(form_data: CodeForm, user=Depends(get_admin_user)):
|
||||
try:
|
||||
formatted_code = black.format_str(form_data.code, mode=black.Mode())
|
||||
return {"code": formatted_code}
|
||||
|
|
|
|||
|
|
@ -2,16 +2,87 @@
|
|||
import asyncio
|
||||
from typing import Dict
|
||||
from uuid import uuid4
|
||||
import json
|
||||
from redis.asyncio import Redis
|
||||
from fastapi import Request
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
# A dictionary to keep track of active tasks
|
||||
tasks: Dict[str, asyncio.Task] = {}
|
||||
chat_tasks = {}
|
||||
|
||||
|
||||
def cleanup_task(task_id: str, id=None):
|
||||
REDIS_TASKS_KEY = "open-webui:tasks"
|
||||
REDIS_CHAT_TASKS_KEY = "open-webui:tasks:chat"
|
||||
REDIS_PUBSUB_CHANNEL = "open-webui:tasks:commands"
|
||||
|
||||
|
||||
def is_redis(request: Request) -> bool:
|
||||
# Called everywhere a request is available to check Redis
|
||||
return hasattr(request.app.state, "redis") and (request.app.state.redis is not None)
|
||||
|
||||
|
||||
async def redis_task_command_listener(app):
|
||||
redis: Redis = app.state.redis
|
||||
pubsub = redis.pubsub()
|
||||
await pubsub.subscribe(REDIS_PUBSUB_CHANNEL)
|
||||
|
||||
async for message in pubsub.listen():
|
||||
if message["type"] != "message":
|
||||
continue
|
||||
try:
|
||||
command = json.loads(message["data"])
|
||||
if command.get("action") == "stop":
|
||||
task_id = command.get("task_id")
|
||||
local_task = tasks.get(task_id)
|
||||
if local_task:
|
||||
local_task.cancel()
|
||||
except Exception as e:
|
||||
print(f"Error handling distributed task command: {e}")
|
||||
|
||||
|
||||
### ------------------------------
|
||||
### REDIS-ENABLED HANDLERS
|
||||
### ------------------------------
|
||||
|
||||
|
||||
async def redis_save_task(redis: Redis, task_id: str, chat_id: Optional[str]):
|
||||
pipe = redis.pipeline()
|
||||
pipe.hset(REDIS_TASKS_KEY, task_id, chat_id or "")
|
||||
if chat_id:
|
||||
pipe.sadd(f"{REDIS_CHAT_TASKS_KEY}:{chat_id}", task_id)
|
||||
await pipe.execute()
|
||||
|
||||
|
||||
async def redis_cleanup_task(redis: Redis, task_id: str, chat_id: Optional[str]):
|
||||
pipe = redis.pipeline()
|
||||
pipe.hdel(REDIS_TASKS_KEY, task_id)
|
||||
if chat_id:
|
||||
pipe.srem(f"{REDIS_CHAT_TASKS_KEY}:{chat_id}", task_id)
|
||||
if (await pipe.scard(f"{REDIS_CHAT_TASKS_KEY}:{chat_id}").execute())[-1] == 0:
|
||||
pipe.delete(f"{REDIS_CHAT_TASKS_KEY}:{chat_id}") # Remove if empty set
|
||||
await pipe.execute()
|
||||
|
||||
|
||||
async def redis_list_tasks(redis: Redis) -> List[str]:
|
||||
return list(await redis.hkeys(REDIS_TASKS_KEY))
|
||||
|
||||
|
||||
async def redis_list_chat_tasks(redis: Redis, chat_id: str) -> List[str]:
|
||||
return list(await redis.smembers(f"{REDIS_CHAT_TASKS_KEY}:{chat_id}"))
|
||||
|
||||
|
||||
async def redis_send_command(redis: Redis, command: dict):
|
||||
await redis.publish(REDIS_PUBSUB_CHANNEL, json.dumps(command))
|
||||
|
||||
|
||||
async def cleanup_task(request, task_id: str, id=None):
|
||||
"""
|
||||
Remove a completed or canceled task from the global `tasks` dictionary.
|
||||
"""
|
||||
if is_redis(request):
|
||||
await redis_cleanup_task(request.app.state.redis, task_id, id)
|
||||
|
||||
tasks.pop(task_id, None) # Remove the task if it exists
|
||||
|
||||
# If an ID is provided, remove the task from the chat_tasks dictionary
|
||||
|
|
@ -21,7 +92,7 @@ def cleanup_task(task_id: str, id=None):
|
|||
chat_tasks.pop(id, None)
|
||||
|
||||
|
||||
def create_task(coroutine, id=None):
|
||||
async def create_task(request, coroutine, id=None):
|
||||
"""
|
||||
Create a new asyncio task and add it to the global task dictionary.
|
||||
"""
|
||||
|
|
@ -29,7 +100,9 @@ def create_task(coroutine, id=None):
|
|||
task = asyncio.create_task(coroutine) # Create the task
|
||||
|
||||
# Add a done callback for cleanup
|
||||
task.add_done_callback(lambda t: cleanup_task(task_id, id))
|
||||
task.add_done_callback(
|
||||
lambda t: asyncio.create_task(cleanup_task(request, task_id, id))
|
||||
)
|
||||
tasks[task_id] = task
|
||||
|
||||
# If an ID is provided, associate the task with that ID
|
||||
|
|
@ -38,34 +111,46 @@ def create_task(coroutine, id=None):
|
|||
else:
|
||||
chat_tasks[id] = [task_id]
|
||||
|
||||
if is_redis(request):
|
||||
await redis_save_task(request.app.state.redis, task_id, id)
|
||||
|
||||
return task_id, task
|
||||
|
||||
|
||||
def get_task(task_id: str):
|
||||
"""
|
||||
Retrieve a task by its task ID.
|
||||
"""
|
||||
return tasks.get(task_id)
|
||||
|
||||
|
||||
def list_tasks():
|
||||
async def list_tasks(request):
|
||||
"""
|
||||
List all currently active task IDs.
|
||||
"""
|
||||
if is_redis(request):
|
||||
return await redis_list_tasks(request.app.state.redis)
|
||||
return list(tasks.keys())
|
||||
|
||||
|
||||
def list_task_ids_by_chat_id(id):
|
||||
async def list_task_ids_by_chat_id(request, id):
|
||||
"""
|
||||
List all tasks associated with a specific ID.
|
||||
"""
|
||||
if is_redis(request):
|
||||
return await redis_list_chat_tasks(request.app.state.redis, id)
|
||||
return chat_tasks.get(id, [])
|
||||
|
||||
|
||||
async def stop_task(task_id: str):
|
||||
async def stop_task(request, task_id: str):
|
||||
"""
|
||||
Cancel a running task and remove it from the global task list.
|
||||
"""
|
||||
if is_redis(request):
|
||||
# PUBSUB: All instances check if they have this task, and stop if so.
|
||||
await redis_send_command(
|
||||
request.app.state.redis,
|
||||
{
|
||||
"action": "stop",
|
||||
"task_id": task_id,
|
||||
},
|
||||
)
|
||||
# Optionally check if task_id still in Redis a few moments later for feedback?
|
||||
return {"status": True, "message": f"Stop signal sent for {task_id}"}
|
||||
|
||||
task = tasks.get(task_id)
|
||||
if not task:
|
||||
raise ValueError(f"Task with ID {task_id} not found.")
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ from open_webui.env import (
|
|||
TRUSTED_SIGNATURE_KEY,
|
||||
STATIC_DIR,
|
||||
SRC_LOG_LEVELS,
|
||||
WEBUI_AUTH_TRUSTED_EMAIL_HEADER,
|
||||
)
|
||||
|
||||
from fastapi import BackgroundTasks, Depends, HTTPException, Request, Response, status
|
||||
|
|
@ -157,6 +158,7 @@ def get_http_authorization_cred(auth_header: Optional[str]):
|
|||
|
||||
def get_current_user(
|
||||
request: Request,
|
||||
response: Response,
|
||||
background_tasks: BackgroundTasks,
|
||||
auth_token: HTTPAuthorizationCredentials = Depends(bearer_security),
|
||||
):
|
||||
|
|
@ -225,6 +227,19 @@ def get_current_user(
|
|||
detail=ERROR_MESSAGES.INVALID_TOKEN,
|
||||
)
|
||||
else:
|
||||
if WEBUI_AUTH_TRUSTED_EMAIL_HEADER:
|
||||
trusted_email = request.headers.get(WEBUI_AUTH_TRUSTED_EMAIL_HEADER)
|
||||
if trusted_email and user.email != trusted_email:
|
||||
# Delete the token cookie
|
||||
response.delete_cookie("token")
|
||||
# Delete OAuth token if present
|
||||
if request.cookies.get("oauth_id_token"):
|
||||
response.delete_cookie("oauth_id_token")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="User mismatch. Please sign in again.",
|
||||
)
|
||||
|
||||
# Add user info to current span
|
||||
current_span = trace.get_current_span()
|
||||
if current_span:
|
||||
|
|
|
|||
|
|
@ -320,12 +320,7 @@ async def chat_completed(request: Request, form_data: dict, user: Any):
|
|||
extra_params = {
|
||||
"__event_emitter__": get_event_emitter(metadata),
|
||||
"__event_call__": get_event_call(metadata),
|
||||
"__user__": {
|
||||
"id": user.id,
|
||||
"email": user.email,
|
||||
"name": user.name,
|
||||
"role": user.role,
|
||||
},
|
||||
"__user__": user.model_dump() if isinstance(user, UserModel) else {},
|
||||
"__metadata__": metadata,
|
||||
"__request__": request,
|
||||
"__model__": model,
|
||||
|
|
@ -424,12 +419,7 @@ async def chat_action(request: Request, action_id: str, form_data: dict, user: A
|
|||
params[key] = value
|
||||
|
||||
if "__user__" in sig.parameters:
|
||||
__user__ = {
|
||||
"id": user.id,
|
||||
"email": user.email,
|
||||
"name": user.name,
|
||||
"role": user.role,
|
||||
}
|
||||
__user__ = (user.model_dump() if isinstance(user, UserModel) else {},)
|
||||
|
||||
try:
|
||||
if hasattr(function_module, "UserValves"):
|
||||
|
|
|
|||
90
backend/open_webui/utils/embeddings.py
Normal file
90
backend/open_webui/utils/embeddings.py
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
import random
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from fastapi import Request
|
||||
from open_webui.models.users import UserModel
|
||||
from open_webui.models.models import Models
|
||||
from open_webui.utils.models import check_model_access
|
||||
from open_webui.env import SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL, BYPASS_MODEL_ACCESS_CONTROL
|
||||
|
||||
from open_webui.routers.openai import embeddings as openai_embeddings
|
||||
from open_webui.routers.ollama import (
|
||||
embeddings as ollama_embeddings,
|
||||
GenerateEmbeddingsForm,
|
||||
)
|
||||
|
||||
|
||||
from open_webui.utils.payload import convert_embedding_payload_openai_to_ollama
|
||||
from open_webui.utils.response import convert_embedding_response_ollama_to_openai
|
||||
|
||||
logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(SRC_LOG_LEVELS["MAIN"])
|
||||
|
||||
|
||||
async def generate_embeddings(
|
||||
request: Request,
|
||||
form_data: dict,
|
||||
user: UserModel,
|
||||
bypass_filter: bool = False,
|
||||
):
|
||||
"""
|
||||
Dispatch and handle embeddings generation based on the model type (OpenAI, Ollama).
|
||||
|
||||
Args:
|
||||
request (Request): The FastAPI request context.
|
||||
form_data (dict): The input data sent to the endpoint.
|
||||
user (UserModel): The authenticated user.
|
||||
bypass_filter (bool): If True, disables access filtering (default False).
|
||||
|
||||
Returns:
|
||||
dict: The embeddings response, following OpenAI API compatibility.
|
||||
"""
|
||||
if BYPASS_MODEL_ACCESS_CONTROL:
|
||||
bypass_filter = True
|
||||
|
||||
# Attach extra metadata from request.state if present
|
||||
if hasattr(request.state, "metadata"):
|
||||
if "metadata" not in form_data:
|
||||
form_data["metadata"] = request.state.metadata
|
||||
else:
|
||||
form_data["metadata"] = {
|
||||
**form_data["metadata"],
|
||||
**request.state.metadata,
|
||||
}
|
||||
|
||||
# If "direct" flag present, use only that model
|
||||
if getattr(request.state, "direct", False) and hasattr(request.state, "model"):
|
||||
models = {
|
||||
request.state.model["id"]: request.state.model,
|
||||
}
|
||||
else:
|
||||
models = request.app.state.MODELS
|
||||
|
||||
model_id = form_data.get("model")
|
||||
if model_id not in models:
|
||||
raise Exception("Model not found")
|
||||
model = models[model_id]
|
||||
|
||||
# Access filtering
|
||||
if not getattr(request.state, "direct", False):
|
||||
if not bypass_filter and user.role == "user":
|
||||
check_model_access(user, model)
|
||||
|
||||
# Ollama backend
|
||||
if model.get("owned_by") == "ollama":
|
||||
ollama_payload = convert_embedding_payload_openai_to_ollama(form_data)
|
||||
response = await ollama_embeddings(
|
||||
request=request,
|
||||
form_data=GenerateEmbeddingsForm(**ollama_payload),
|
||||
user=user,
|
||||
)
|
||||
return convert_embedding_response_ollama_to_openai(response)
|
||||
|
||||
# Default: OpenAI or compatible backend
|
||||
return await openai_embeddings(
|
||||
request=request,
|
||||
form_data=form_data,
|
||||
user=user,
|
||||
)
|
||||
|
|
@ -32,11 +32,17 @@ from open_webui.socket.main import (
|
|||
from open_webui.routers.tasks import (
|
||||
generate_queries,
|
||||
generate_title,
|
||||
generate_follow_ups,
|
||||
generate_image_prompt,
|
||||
generate_chat_tags,
|
||||
)
|
||||
from open_webui.routers.retrieval import process_web_search, SearchForm
|
||||
from open_webui.routers.images import image_generations, GenerateImageForm
|
||||
from open_webui.routers.images import (
|
||||
load_b64_image_data,
|
||||
image_generations,
|
||||
GenerateImageForm,
|
||||
upload_image,
|
||||
)
|
||||
from open_webui.routers.pipelines import (
|
||||
process_pipeline_inlet_filter,
|
||||
process_pipeline_outlet_filter,
|
||||
|
|
@ -692,13 +698,8 @@ def apply_params_to_form_data(form_data, model):
|
|||
params = deep_update(params, custom_params)
|
||||
|
||||
if model.get("ollama"):
|
||||
# Ollama specific parameters
|
||||
form_data["options"] = params
|
||||
|
||||
if "format" in params:
|
||||
form_data["format"] = params["format"]
|
||||
|
||||
if "keep_alive" in params:
|
||||
form_data["keep_alive"] = params["keep_alive"]
|
||||
else:
|
||||
if isinstance(params, dict):
|
||||
for key, value in params.items():
|
||||
|
|
@ -726,12 +727,7 @@ async def process_chat_payload(request, form_data, user, metadata, model):
|
|||
extra_params = {
|
||||
"__event_emitter__": event_emitter,
|
||||
"__event_call__": event_call,
|
||||
"__user__": {
|
||||
"id": user.id,
|
||||
"email": user.email,
|
||||
"name": user.name,
|
||||
"role": user.role,
|
||||
},
|
||||
"__user__": user.model_dump() if isinstance(user, UserModel) else {},
|
||||
"__metadata__": metadata,
|
||||
"__request__": request,
|
||||
"__model__": model,
|
||||
|
|
@ -1048,6 +1044,59 @@ async def process_chat_response(
|
|||
)
|
||||
|
||||
if tasks and messages:
|
||||
if (
|
||||
TASKS.FOLLOW_UP_GENERATION in tasks
|
||||
and tasks[TASKS.FOLLOW_UP_GENERATION]
|
||||
):
|
||||
res = await generate_follow_ups(
|
||||
request,
|
||||
{
|
||||
"model": message["model"],
|
||||
"messages": messages,
|
||||
"message_id": metadata["message_id"],
|
||||
"chat_id": metadata["chat_id"],
|
||||
},
|
||||
user,
|
||||
)
|
||||
|
||||
if res and isinstance(res, dict):
|
||||
if len(res.get("choices", [])) == 1:
|
||||
follow_ups_string = (
|
||||
res.get("choices", [])[0]
|
||||
.get("message", {})
|
||||
.get("content", "")
|
||||
)
|
||||
else:
|
||||
follow_ups_string = ""
|
||||
|
||||
follow_ups_string = follow_ups_string[
|
||||
follow_ups_string.find("{") : follow_ups_string.rfind("}")
|
||||
+ 1
|
||||
]
|
||||
|
||||
try:
|
||||
follow_ups = json.loads(follow_ups_string).get(
|
||||
"follow_ups", []
|
||||
)
|
||||
Chats.upsert_message_to_chat_by_id_and_message_id(
|
||||
metadata["chat_id"],
|
||||
metadata["message_id"],
|
||||
{
|
||||
"followUps": follow_ups,
|
||||
},
|
||||
)
|
||||
|
||||
await event_emitter(
|
||||
{
|
||||
"type": "chat:message:follow_ups",
|
||||
"data": {
|
||||
"follow_ups": follow_ups,
|
||||
},
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
pass
|
||||
|
||||
if TASKS.TITLE_GENERATION in tasks:
|
||||
if tasks[TASKS.TITLE_GENERATION]:
|
||||
res = await generate_title(
|
||||
|
|
@ -1273,12 +1322,7 @@ async def process_chat_response(
|
|||
extra_params = {
|
||||
"__event_emitter__": event_emitter,
|
||||
"__event_call__": event_caller,
|
||||
"__user__": {
|
||||
"id": user.id,
|
||||
"email": user.email,
|
||||
"name": user.name,
|
||||
"role": user.role,
|
||||
},
|
||||
"__user__": user.model_dump() if isinstance(user, UserModel) else {},
|
||||
"__metadata__": metadata,
|
||||
"__request__": request,
|
||||
"__model__": model,
|
||||
|
|
@ -1822,9 +1866,11 @@ async def process_chat_response(
|
|||
|
||||
value = delta.get("content")
|
||||
|
||||
reasoning_content = delta.get(
|
||||
"reasoning_content"
|
||||
) or delta.get("reasoning")
|
||||
reasoning_content = (
|
||||
delta.get("reasoning_content")
|
||||
or delta.get("reasoning")
|
||||
or delta.get("thinking")
|
||||
)
|
||||
if reasoning_content:
|
||||
if (
|
||||
not content_blocks
|
||||
|
|
@ -2215,28 +2261,21 @@ async def process_chat_response(
|
|||
stdoutLines = stdout.split("\n")
|
||||
for idx, line in enumerate(stdoutLines):
|
||||
if "data:image/png;base64" in line:
|
||||
id = str(uuid4())
|
||||
|
||||
# ensure the path exists
|
||||
os.makedirs(
|
||||
os.path.join(CACHE_DIR, "images"),
|
||||
exist_ok=True,
|
||||
image_url = ""
|
||||
# Extract base64 image data from the line
|
||||
image_data, content_type = (
|
||||
load_b64_image_data(line)
|
||||
)
|
||||
|
||||
image_path = os.path.join(
|
||||
CACHE_DIR,
|
||||
f"images/{id}.png",
|
||||
)
|
||||
|
||||
with open(image_path, "wb") as f:
|
||||
f.write(
|
||||
base64.b64decode(
|
||||
line.split(",")[1]
|
||||
)
|
||||
if image_data is not None:
|
||||
image_url = upload_image(
|
||||
request,
|
||||
image_data,
|
||||
content_type,
|
||||
metadata,
|
||||
user,
|
||||
)
|
||||
|
||||
stdoutLines[idx] = (
|
||||
f""
|
||||
f""
|
||||
)
|
||||
|
||||
output["stdout"] = "\n".join(stdoutLines)
|
||||
|
|
@ -2247,30 +2286,22 @@ async def process_chat_response(
|
|||
resultLines = result.split("\n")
|
||||
for idx, line in enumerate(resultLines):
|
||||
if "data:image/png;base64" in line:
|
||||
id = str(uuid4())
|
||||
|
||||
# ensure the path exists
|
||||
os.makedirs(
|
||||
os.path.join(CACHE_DIR, "images"),
|
||||
exist_ok=True,
|
||||
image_url = ""
|
||||
# Extract base64 image data from the line
|
||||
image_data, content_type = (
|
||||
load_b64_image_data(line)
|
||||
)
|
||||
|
||||
image_path = os.path.join(
|
||||
CACHE_DIR,
|
||||
f"images/{id}.png",
|
||||
)
|
||||
|
||||
with open(image_path, "wb") as f:
|
||||
f.write(
|
||||
base64.b64decode(
|
||||
line.split(",")[1]
|
||||
)
|
||||
if image_data is not None:
|
||||
image_url = upload_image(
|
||||
request,
|
||||
image_data,
|
||||
content_type,
|
||||
metadata,
|
||||
user,
|
||||
)
|
||||
|
||||
resultLines[idx] = (
|
||||
f""
|
||||
f""
|
||||
)
|
||||
|
||||
output["result"] = "\n".join(resultLines)
|
||||
except Exception as e:
|
||||
output = str(e)
|
||||
|
|
@ -2379,8 +2410,8 @@ async def process_chat_response(
|
|||
await response.background()
|
||||
|
||||
# background_tasks.add_task(post_response_handler, response, events)
|
||||
task_id, _ = create_task(
|
||||
post_response_handler(response, events), id=metadata["chat_id"]
|
||||
task_id, _ = await create_task(
|
||||
request, post_response_handler(response, events), id=metadata["chat_id"]
|
||||
)
|
||||
return {"status": True, "task_id": task_id}
|
||||
|
||||
|
|
|
|||
|
|
@ -208,6 +208,7 @@ def openai_chat_message_template(model: str):
|
|||
def openai_chat_chunk_message_template(
|
||||
model: str,
|
||||
content: Optional[str] = None,
|
||||
reasoning_content: Optional[str] = None,
|
||||
tool_calls: Optional[list[dict]] = None,
|
||||
usage: Optional[dict] = None,
|
||||
) -> dict:
|
||||
|
|
@ -220,6 +221,9 @@ def openai_chat_chunk_message_template(
|
|||
if content:
|
||||
template["choices"][0]["delta"]["content"] = content
|
||||
|
||||
if reasoning_content:
|
||||
template["choices"][0]["delta"]["reasoning_content"] = reasoning_content
|
||||
|
||||
if tool_calls:
|
||||
template["choices"][0]["delta"]["tool_calls"] = tool_calls
|
||||
|
||||
|
|
@ -234,6 +238,7 @@ def openai_chat_chunk_message_template(
|
|||
def openai_chat_completion_message_template(
|
||||
model: str,
|
||||
message: Optional[str] = None,
|
||||
reasoning_content: Optional[str] = None,
|
||||
tool_calls: Optional[list[dict]] = None,
|
||||
usage: Optional[dict] = None,
|
||||
) -> dict:
|
||||
|
|
@ -241,8 +246,9 @@ def openai_chat_completion_message_template(
|
|||
template["object"] = "chat.completion"
|
||||
if message is not None:
|
||||
template["choices"][0]["message"] = {
|
||||
"content": message,
|
||||
"role": "assistant",
|
||||
"content": message,
|
||||
**({"reasoning_content": reasoning_content} if reasoning_content else {}),
|
||||
**({"tool_calls": tool_calls} if tool_calls else {}),
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -538,7 +538,7 @@ class OAuthManager:
|
|||
# Redirect back to the frontend with the JWT token
|
||||
|
||||
redirect_base_url = request.app.state.config.WEBUI_URL or request.base_url
|
||||
if redirect_base_url.endswith("/"):
|
||||
if isinstance(redirect_base_url, str) and redirect_base_url.endswith("/"):
|
||||
redirect_base_url = redirect_base_url[:-1]
|
||||
redirect_url = f"{redirect_base_url}/auth#token={jwt_token}"
|
||||
|
||||
|
|
|
|||
|
|
@ -175,14 +175,26 @@ def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict:
|
|||
"num_thread": int,
|
||||
}
|
||||
|
||||
# Extract keep_alive from options if it exists
|
||||
if "options" in form_data and "keep_alive" in form_data["options"]:
|
||||
form_data["keep_alive"] = form_data["options"]["keep_alive"]
|
||||
del form_data["options"]["keep_alive"]
|
||||
def parse_json(value: str) -> dict:
|
||||
"""
|
||||
Parses a JSON string into a dictionary, handling potential JSONDecodeError.
|
||||
"""
|
||||
try:
|
||||
return json.loads(value)
|
||||
except Exception as e:
|
||||
return value
|
||||
|
||||
if "options" in form_data and "format" in form_data["options"]:
|
||||
form_data["format"] = form_data["options"]["format"]
|
||||
del form_data["options"]["format"]
|
||||
ollama_root_params = {
|
||||
"format": lambda x: parse_json(x),
|
||||
"keep_alive": lambda x: parse_json(x),
|
||||
"think": bool,
|
||||
}
|
||||
|
||||
for key, value in ollama_root_params.items():
|
||||
if (param := params.get(key, None)) is not None:
|
||||
# Copy the parameter to new name then delete it, to prevent Ollama warning of invalid option provided
|
||||
form_data[key] = value(param)
|
||||
del params[key]
|
||||
|
||||
return apply_model_params_to_body(params, form_data, mappings)
|
||||
|
||||
|
|
@ -279,36 +291,48 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
|
|||
openai_payload.get("messages")
|
||||
)
|
||||
ollama_payload["stream"] = openai_payload.get("stream", False)
|
||||
|
||||
if "tools" in openai_payload:
|
||||
ollama_payload["tools"] = openai_payload["tools"]
|
||||
|
||||
if "format" in openai_payload:
|
||||
ollama_payload["format"] = openai_payload["format"]
|
||||
|
||||
# If there are advanced parameters in the payload, format them in Ollama's options field
|
||||
if openai_payload.get("options"):
|
||||
ollama_payload["options"] = openai_payload["options"]
|
||||
ollama_options = openai_payload["options"]
|
||||
|
||||
def parse_json(value: str) -> dict:
|
||||
"""
|
||||
Parses a JSON string into a dictionary, handling potential JSONDecodeError.
|
||||
"""
|
||||
try:
|
||||
return json.loads(value)
|
||||
except Exception as e:
|
||||
return value
|
||||
|
||||
ollama_root_params = {
|
||||
"format": lambda x: parse_json(x),
|
||||
"keep_alive": lambda x: parse_json(x),
|
||||
"think": bool,
|
||||
}
|
||||
|
||||
# Ollama's options field can contain parameters that should be at the root level.
|
||||
for key, value in ollama_root_params.items():
|
||||
if (param := ollama_options.get(key, None)) is not None:
|
||||
# Copy the parameter to new name then delete it, to prevent Ollama warning of invalid option provided
|
||||
ollama_payload[key] = value(param)
|
||||
del ollama_options[key]
|
||||
|
||||
# Re-Mapping OpenAI's `max_tokens` -> Ollama's `num_predict`
|
||||
if "max_tokens" in ollama_options:
|
||||
ollama_options["num_predict"] = ollama_options["max_tokens"]
|
||||
del ollama_options[
|
||||
"max_tokens"
|
||||
] # To prevent Ollama warning of invalid option provided
|
||||
del ollama_options["max_tokens"]
|
||||
|
||||
# Ollama lacks a "system" prompt option. It has to be provided as a direct parameter, so we copy it down.
|
||||
# Comment: Not sure why this is needed, but we'll keep it for compatibility.
|
||||
if "system" in ollama_options:
|
||||
ollama_payload["system"] = ollama_options["system"]
|
||||
del ollama_options[
|
||||
"system"
|
||||
] # To prevent Ollama warning of invalid option provided
|
||||
del ollama_options["system"]
|
||||
|
||||
# Extract keep_alive from options if it exists
|
||||
if "keep_alive" in ollama_options:
|
||||
ollama_payload["keep_alive"] = ollama_options["keep_alive"]
|
||||
del ollama_options["keep_alive"]
|
||||
ollama_payload["options"] = ollama_options
|
||||
|
||||
# If there is the "stop" parameter in the openai_payload, remap it to the ollama_payload.options
|
||||
if "stop" in openai_payload:
|
||||
|
|
@ -329,3 +353,32 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
|
|||
ollama_payload["format"] = format
|
||||
|
||||
return ollama_payload
|
||||
|
||||
|
||||
def convert_embedding_payload_openai_to_ollama(openai_payload: dict) -> dict:
|
||||
"""
|
||||
Convert an embeddings request payload from OpenAI format to Ollama format.
|
||||
|
||||
Args:
|
||||
openai_payload (dict): The original payload designed for OpenAI API usage.
|
||||
|
||||
Returns:
|
||||
dict: A payload compatible with the Ollama API embeddings endpoint.
|
||||
"""
|
||||
ollama_payload = {"model": openai_payload.get("model")}
|
||||
input_value = openai_payload.get("input")
|
||||
|
||||
# Ollama expects 'input' as a list, and 'prompt' as a single string.
|
||||
if isinstance(input_value, list):
|
||||
ollama_payload["input"] = input_value
|
||||
ollama_payload["prompt"] = "\n".join(str(x) for x in input_value)
|
||||
else:
|
||||
ollama_payload["input"] = [input_value]
|
||||
ollama_payload["prompt"] = str(input_value)
|
||||
|
||||
# Optionally forward other fields if present
|
||||
for optional_key in ("options", "truncate", "keep_alive"):
|
||||
if optional_key in openai_payload:
|
||||
ollama_payload[optional_key] = openai_payload[optional_key]
|
||||
|
||||
return ollama_payload
|
||||
|
|
|
|||
|
|
@ -1,7 +1,6 @@
|
|||
import socketio
|
||||
import redis
|
||||
from redis import asyncio as aioredis
|
||||
from urllib.parse import urlparse
|
||||
from typing import Optional
|
||||
|
||||
|
||||
def parse_redis_service_url(redis_url):
|
||||
|
|
@ -18,23 +17,46 @@ def parse_redis_service_url(redis_url):
|
|||
}
|
||||
|
||||
|
||||
def get_redis_connection(redis_url, redis_sentinels, decode_responses=True):
|
||||
if redis_sentinels:
|
||||
redis_config = parse_redis_service_url(redis_url)
|
||||
sentinel = redis.sentinel.Sentinel(
|
||||
redis_sentinels,
|
||||
port=redis_config["port"],
|
||||
db=redis_config["db"],
|
||||
username=redis_config["username"],
|
||||
password=redis_config["password"],
|
||||
decode_responses=decode_responses,
|
||||
)
|
||||
def get_redis_connection(
|
||||
redis_url, redis_sentinels, async_mode=False, decode_responses=True
|
||||
):
|
||||
if async_mode:
|
||||
import redis.asyncio as redis
|
||||
|
||||
# Get a master connection from Sentinel
|
||||
return sentinel.master_for(redis_config["service"])
|
||||
# If using sentinel in async mode
|
||||
if redis_sentinels:
|
||||
redis_config = parse_redis_service_url(redis_url)
|
||||
sentinel = redis.sentinel.Sentinel(
|
||||
redis_sentinels,
|
||||
port=redis_config["port"],
|
||||
db=redis_config["db"],
|
||||
username=redis_config["username"],
|
||||
password=redis_config["password"],
|
||||
decode_responses=decode_responses,
|
||||
)
|
||||
return sentinel.master_for(redis_config["service"])
|
||||
elif redis_url:
|
||||
return redis.from_url(redis_url, decode_responses=decode_responses)
|
||||
else:
|
||||
return None
|
||||
else:
|
||||
# Standard Redis connection
|
||||
return redis.Redis.from_url(redis_url, decode_responses=decode_responses)
|
||||
import redis
|
||||
|
||||
if redis_sentinels:
|
||||
redis_config = parse_redis_service_url(redis_url)
|
||||
sentinel = redis.sentinel.Sentinel(
|
||||
redis_sentinels,
|
||||
port=redis_config["port"],
|
||||
db=redis_config["db"],
|
||||
username=redis_config["username"],
|
||||
password=redis_config["password"],
|
||||
decode_responses=decode_responses,
|
||||
)
|
||||
return sentinel.master_for(redis_config["service"])
|
||||
elif redis_url:
|
||||
return redis.Redis.from_url(redis_url, decode_responses=decode_responses)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
def get_sentinels_from_env(sentinel_hosts_env, sentinel_port_env):
|
||||
|
|
|
|||
|
|
@ -83,6 +83,7 @@ def convert_ollama_usage_to_openai(data: dict) -> dict:
|
|||
def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
|
||||
model = ollama_response.get("model", "ollama")
|
||||
message_content = ollama_response.get("message", {}).get("content", "")
|
||||
reasoning_content = ollama_response.get("message", {}).get("thinking", None)
|
||||
tool_calls = ollama_response.get("message", {}).get("tool_calls", None)
|
||||
openai_tool_calls = None
|
||||
|
||||
|
|
@ -94,7 +95,7 @@ def convert_response_ollama_to_openai(ollama_response: dict) -> dict:
|
|||
usage = convert_ollama_usage_to_openai(data)
|
||||
|
||||
response = openai_chat_completion_message_template(
|
||||
model, message_content, openai_tool_calls, usage
|
||||
model, message_content, reasoning_content, openai_tool_calls, usage
|
||||
)
|
||||
return response
|
||||
|
||||
|
|
@ -105,6 +106,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
|
|||
|
||||
model = data.get("model", "ollama")
|
||||
message_content = data.get("message", {}).get("content", None)
|
||||
reasoning_content = data.get("message", {}).get("thinking", None)
|
||||
tool_calls = data.get("message", {}).get("tool_calls", None)
|
||||
openai_tool_calls = None
|
||||
|
||||
|
|
@ -118,10 +120,71 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
|
|||
usage = convert_ollama_usage_to_openai(data)
|
||||
|
||||
data = openai_chat_chunk_message_template(
|
||||
model, message_content, openai_tool_calls, usage
|
||||
model, message_content, reasoning_content, openai_tool_calls, usage
|
||||
)
|
||||
|
||||
line = f"data: {json.dumps(data)}\n\n"
|
||||
yield line
|
||||
|
||||
yield "data: [DONE]\n\n"
|
||||
|
||||
|
||||
def convert_embedding_response_ollama_to_openai(response) -> dict:
    """
    Convert the response from the Ollama embeddings endpoint to the
    OpenAI-compatible format.

    Args:
        response (dict): The response from the Ollama API, e.g.
            {"embedding": [...], "model": "..."} or
            {"embeddings": [{"embedding": [...], "index": 0}, ...], "model": "..."}

    Returns:
        dict: Response adapted to OpenAI's embeddings API format, e.g.
            {
                "object": "list",
                "data": [
                    {"object": "embedding", "embedding": [...], "index": 0},
                    ...
                ],
                "model": "...",
            }
    """
    if isinstance(response, dict):
        # Ollama batch-style output
        if "embeddings" in response:
            data = [
                {
                    "object": "embedding",
                    "embedding": entry.get("embedding"),
                    # Fall back to the list position when no index is provided.
                    "index": entry.get("index", position),
                }
                for position, entry in enumerate(response["embeddings"])
            ]
            return {
                "object": "list",
                "data": data,
                "model": response.get("model"),
            }

        # Ollama single output
        if "embedding" in response:
            return {
                "object": "list",
                "data": [
                    {
                        "object": "embedding",
                        "embedding": response["embedding"],
                        "index": 0,
                    }
                ],
                "model": response.get("model"),
            }

        # Already OpenAI-compatible?
        if isinstance(response.get("data"), list):
            return response

    # Fallback: return as is if unrecognized
    return response
|
||||
|
|
|
|||
|
|
@ -207,6 +207,24 @@ def title_generation_template(
|
|||
return template
|
||||
|
||||
|
||||
def follow_up_generation_template(
    template: str, messages: list[dict], user: Optional[dict] = None
) -> str:
    """
    Fill a follow-up generation prompt template from the conversation context.

    Substitutes the last user prompt and the message history into the
    template, then applies user-level variables (name/location) when a user
    dict is supplied.

    Args:
        template (str): The raw template containing prompt/message/user variables.
        messages (list[dict]): The chat messages used to populate the template.
        user (Optional[dict]): Optional user info providing name and location.

    Returns:
        str: The fully substituted template.
    """
    last_prompt = get_last_user_message(messages)
    filled = replace_prompt_variable(template, last_prompt)
    filled = replace_messages_variable(filled, messages)

    user_variables = (
        {"user_name": user.get("name"), "user_location": user.get("location")}
        if user
        else {}
    )
    return prompt_template(filled, **user_variables)
|
||||
|
||||
|
||||
def tags_generation_template(
|
||||
template: str, messages: list[dict], user: Optional[dict] = None
|
||||
) -> str:
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
fastapi==0.115.7
|
||||
uvicorn[standard]==0.34.0
|
||||
uvicorn[standard]==0.34.2
|
||||
pydantic==2.10.6
|
||||
python-multipart==0.0.20
|
||||
|
||||
|
|
@ -76,13 +76,13 @@ pandas==2.2.3
|
|||
openpyxl==3.1.5
|
||||
pyxlsb==1.0.10
|
||||
xlrd==2.0.1
|
||||
validators==0.34.0
|
||||
validators==0.35.0
|
||||
psutil
|
||||
sentencepiece
|
||||
soundfile==0.13.1
|
||||
azure-ai-documentintelligence==1.0.0
|
||||
azure-ai-documentintelligence==1.0.2
|
||||
|
||||
pillow==11.1.0
|
||||
pillow==11.2.1
|
||||
opencv-python-headless==4.11.0.86
|
||||
rapidocr-onnxruntime==1.4.4
|
||||
rank-bm25==0.2.2
|
||||
|
|
|
|||
|
|
@ -14,7 +14,11 @@ if [[ "${WEB_LOADER_ENGINE,,}" == "playwright" ]]; then
|
|||
python -c "import nltk; nltk.download('punkt_tab')"
|
||||
fi
|
||||
|
||||
KEY_FILE=.webui_secret_key
|
||||
if [ -n "${WEBUI_SECRET_KEY_FILE}" ]; then
|
||||
KEY_FILE="${WEBUI_SECRET_KEY_FILE}"
|
||||
else
|
||||
KEY_FILE=".webui_secret_key"
|
||||
fi
|
||||
|
||||
PORT="${PORT:-8080}"
|
||||
HOST="${HOST:-0.0.0.0}"
|
||||
|
|
|
|||
|
|
@ -18,6 +18,10 @@ IF /I "%WEB_LOADER_ENGINE%" == "playwright" (
|
|||
)
|
||||
|
||||
SET "KEY_FILE=.webui_secret_key"
|
||||
IF NOT "%WEBUI_SECRET_KEY_FILE%" == "" (
|
||||
SET "KEY_FILE=%WEBUI_SECRET_KEY_FILE%"
|
||||
)
|
||||
|
||||
IF "%PORT%"=="" SET PORT=8080
|
||||
IF "%HOST%"=="" SET HOST=0.0.0.0
|
||||
SET "WEBUI_SECRET_KEY=%WEBUI_SECRET_KEY%"
|
||||
|
|
|
|||
85
package-lock.json
generated
85
package-lock.json
generated
|
|
@ -1,12 +1,12 @@
|
|||
{
|
||||
"name": "open-webui",
|
||||
"version": "0.6.13",
|
||||
"version": "0.6.14",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "open-webui",
|
||||
"version": "0.6.13",
|
||||
"version": "0.6.14",
|
||||
"dependencies": {
|
||||
"@azure/msal-browser": "^4.5.0",
|
||||
"@codemirror/lang-javascript": "^6.2.2",
|
||||
|
|
@ -31,7 +31,7 @@
|
|||
"@tiptap/starter-kit": "^2.10.0",
|
||||
"@xyflow/svelte": "^0.1.19",
|
||||
"async": "^3.2.5",
|
||||
"bits-ui": "^0.19.7",
|
||||
"bits-ui": "^0.21.15",
|
||||
"codemirror": "^6.0.1",
|
||||
"codemirror-lang-elixir": "^4.0.0",
|
||||
"codemirror-lang-hcl": "^0.1.0",
|
||||
|
|
@ -51,7 +51,7 @@
|
|||
"idb": "^7.1.1",
|
||||
"js-sha256": "^0.10.1",
|
||||
"jspdf": "^3.0.0",
|
||||
"katex": "^0.16.21",
|
||||
"katex": "^0.16.22",
|
||||
"kokoro-js": "^1.1.1",
|
||||
"marked": "^9.1.0",
|
||||
"mermaid": "^11.6.0",
|
||||
|
|
@ -1201,26 +1201,29 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@floating-ui/core": {
|
||||
"version": "1.6.0",
|
||||
"resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.0.tgz",
|
||||
"integrity": "sha512-PcF++MykgmTj3CIyOQbKA/hDzOAiqI3mhuoN44WRCopIs1sgoDoU4oty4Jtqaj/y3oDU6fnVSm4QG0a3t5i0+g==",
|
||||
"version": "1.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.7.1.tgz",
|
||||
"integrity": "sha512-azI0DrjMMfIug/ExbBaeDVJXcY0a7EPvPjb2xAJPa4HeimBX+Z18HK8QQR3jb6356SnDDdxx+hinMLcJEDdOjw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@floating-ui/utils": "^0.2.1"
|
||||
"@floating-ui/utils": "^0.2.9"
|
||||
}
|
||||
},
|
||||
"node_modules/@floating-ui/dom": {
|
||||
"version": "1.6.3",
|
||||
"resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.3.tgz",
|
||||
"integrity": "sha512-RnDthu3mzPlQ31Ss/BTwQ1zjzIhr3lk1gZB1OC56h/1vEtaXkESrOqL5fQVMfXpwGtRwX+YsZBdyHtJMQnkArw==",
|
||||
"version": "1.7.1",
|
||||
"resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.7.1.tgz",
|
||||
"integrity": "sha512-cwsmW/zyw5ltYTUeeYJ60CnQuPqmGwuGVhG9w0PRaRKkAyi38BT5CKrpIbb+jtahSwUl04cWzSx9ZOIxeS6RsQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@floating-ui/core": "^1.0.0",
|
||||
"@floating-ui/utils": "^0.2.0"
|
||||
"@floating-ui/core": "^1.7.1",
|
||||
"@floating-ui/utils": "^0.2.9"
|
||||
}
|
||||
},
|
||||
"node_modules/@floating-ui/utils": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.1.tgz",
|
||||
"integrity": "sha512-9TANp6GPoMtYzQdt54kfAyMmz1+osLlXdg2ENroU7zzrtflTLrrC/lgrIfaSe+Wu0b89GKccT7vxXA0MoAIO+Q=="
|
||||
"version": "0.2.9",
|
||||
"resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.9.tgz",
|
||||
"integrity": "sha512-MDWhGtE+eHw5JW7lq4qhc5yRLS11ERl1c7Z6Xd0a58DozHES6EnNNwUWbMiG4J9Cgj053Bhk8zvlhFYKVhULwg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@gulpjs/to-absolute-glob": {
|
||||
"version": "4.0.0",
|
||||
|
|
@ -1750,9 +1753,10 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@internationalized/date": {
|
||||
"version": "3.5.2",
|
||||
"resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.5.2.tgz",
|
||||
"integrity": "sha512-vo1yOMUt2hzp63IutEaTUxROdvQg1qlMRsbCvbay2AK2Gai7wIgCyK5weEX3nHkiLgo4qCXHijFNC/ILhlRpOQ==",
|
||||
"version": "3.8.2",
|
||||
"resolved": "https://registry.npmjs.org/@internationalized/date/-/date-3.8.2.tgz",
|
||||
"integrity": "sha512-/wENk7CbvLbkUvX1tu0mwq49CVkkWpkXubGel6birjRPyo6uQ4nQpnq5xZu823zRCwwn82zgHrvgF1vZyvmVgA==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"@swc/helpers": "^0.5.0"
|
||||
}
|
||||
|
|
@ -2032,9 +2036,10 @@
|
|||
"integrity": "sha512-CZWV/q6TTe8ta61cZXjfnnHsfWIdFhms03M9T7Cnd5y2mdpylJM0rF1qRq+wsQVRMLz1OYPVEBU9ph2Bx8cxrg=="
|
||||
},
|
||||
"node_modules/@melt-ui/svelte": {
|
||||
"version": "0.76.0",
|
||||
"resolved": "https://registry.npmjs.org/@melt-ui/svelte/-/svelte-0.76.0.tgz",
|
||||
"integrity": "sha512-X1ktxKujjLjOBt8LBvfckHGDMrkHWceRt1jdsUTf0EH76ikNPP1ofSoiV0IhlduDoCBV+2YchJ8kXCDfDXfC9Q==",
|
||||
"version": "0.76.2",
|
||||
"resolved": "https://registry.npmjs.org/@melt-ui/svelte/-/svelte-0.76.2.tgz",
|
||||
"integrity": "sha512-7SbOa11tXUS95T3fReL+dwDs5FyJtCEqrqG3inRziDws346SYLsxOQ6HmX+4BkIsQh1R8U3XNa+EMmdMt38lMA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@floating-ui/core": "^1.3.1",
|
||||
"@floating-ui/dom": "^1.4.5",
|
||||
|
|
@ -2610,11 +2615,12 @@
|
|||
}
|
||||
},
|
||||
"node_modules/@swc/helpers": {
|
||||
"version": "0.5.7",
|
||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.7.tgz",
|
||||
"integrity": "sha512-BVvNZhx362+l2tSwSuyEUV4h7+jk9raNdoTSdLfwTshXJSaGmYKluGRJznziCI3KX02Z19DdsQrdfrpXAU3Hfg==",
|
||||
"version": "0.5.17",
|
||||
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.17.tgz",
|
||||
"integrity": "sha512-5IKx/Y13RsYd+sauPb2x+U/xZikHjolzfuDgTAl/Tdf3Q8rslRvC19NKDLgAJQ6wsqADk10ntlv08nPFw/gO/A==",
|
||||
"license": "Apache-2.0",
|
||||
"dependencies": {
|
||||
"tslib": "^2.4.0"
|
||||
"tslib": "^2.8.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@tailwindcss/container-queries": {
|
||||
|
|
@ -4381,16 +4387,20 @@
|
|||
}
|
||||
},
|
||||
"node_modules/bits-ui": {
|
||||
"version": "0.19.7",
|
||||
"resolved": "https://registry.npmjs.org/bits-ui/-/bits-ui-0.19.7.tgz",
|
||||
"integrity": "sha512-GHUpKvN7QyazhnZNkUy0lxg6W1M6KJHWSZ4a/UGCjPE6nQgk6vKbGysY67PkDtQMknZTZAzVoMj1Eic4IKeCRQ==",
|
||||
"version": "0.21.15",
|
||||
"resolved": "https://registry.npmjs.org/bits-ui/-/bits-ui-0.21.15.tgz",
|
||||
"integrity": "sha512-+m5WSpJnFdCcNdXSTIVC1WYBozipO03qRh03GFWgrdxoHiolCfwW71EYG4LPCWYPG6KcTZV0Cj6iHSiZ7cdKdg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@internationalized/date": "^3.5.1",
|
||||
"@melt-ui/svelte": "0.76.0",
|
||||
"@melt-ui/svelte": "0.76.2",
|
||||
"nanoid": "^5.0.5"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/huntabyte"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"svelte": "^4.0.0"
|
||||
"svelte": "^4.0.0 || ^5.0.0-next.118"
|
||||
}
|
||||
},
|
||||
"node_modules/bl": {
|
||||
|
|
@ -7930,9 +7940,9 @@
|
|||
}
|
||||
},
|
||||
"node_modules/katex": {
|
||||
"version": "0.16.21",
|
||||
"resolved": "https://registry.npmjs.org/katex/-/katex-0.16.21.tgz",
|
||||
"integrity": "sha512-XvqR7FgOHtWupfMiigNzmh+MgUVmDGU2kXZm899ZkPfcuoPuFxyHmXsgATDpFZDAXCI8tvinaVcDo8PIIJSo4A==",
|
||||
"version": "0.16.22",
|
||||
"resolved": "https://registry.npmjs.org/katex/-/katex-0.16.22.tgz",
|
||||
"integrity": "sha512-XCHRdUw4lf3SKBaJe4EvgqIuWwkPSo9XoeO8GjQW94Bp7TWv9hNhzZjZ+OH9yf1UmLygb7DIT5GSFQiyt16zYg==",
|
||||
"funding": [
|
||||
"https://opencollective.com/katex",
|
||||
"https://github.com/sponsors/katex"
|
||||
|
|
@ -11842,9 +11852,10 @@
|
|||
}
|
||||
},
|
||||
"node_modules/tslib": {
|
||||
"version": "2.6.2",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz",
|
||||
"integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q=="
|
||||
"version": "2.8.1",
|
||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||
"integrity": "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w==",
|
||||
"license": "0BSD"
|
||||
},
|
||||
"node_modules/tunnel-agent": {
|
||||
"version": "0.6.0",
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
{
|
||||
"name": "open-webui",
|
||||
"version": "0.6.13",
|
||||
"version": "0.6.14",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "npm run pyodide:fetch && vite dev --host",
|
||||
|
|
@ -75,7 +75,7 @@
|
|||
"@tiptap/starter-kit": "^2.10.0",
|
||||
"@xyflow/svelte": "^0.1.19",
|
||||
"async": "^3.2.5",
|
||||
"bits-ui": "^0.19.7",
|
||||
"bits-ui": "^0.21.15",
|
||||
"codemirror": "^6.0.1",
|
||||
"codemirror-lang-elixir": "^4.0.0",
|
||||
"codemirror-lang-hcl": "^0.1.0",
|
||||
|
|
@ -95,7 +95,7 @@
|
|||
"idb": "^7.1.1",
|
||||
"js-sha256": "^0.10.1",
|
||||
"jspdf": "^3.0.0",
|
||||
"katex": "^0.16.21",
|
||||
"katex": "^0.16.22",
|
||||
"kokoro-js": "^1.1.1",
|
||||
"marked": "^9.1.0",
|
||||
"mermaid": "^11.6.0",
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ dependencies = [
|
|||
"python-jose==3.4.0",
|
||||
"passlib[bcrypt]==1.7.4",
|
||||
|
||||
"requests==2.32.3",
|
||||
"requests==2.32.4",
|
||||
"aiohttp==3.11.11",
|
||||
"async-timeout",
|
||||
"aiocache",
|
||||
|
|
|
|||
|
|
@ -12,7 +12,8 @@ const packages = [
|
|||
'sympy',
|
||||
'tiktoken',
|
||||
'seaborn',
|
||||
'pytz'
|
||||
'pytz',
|
||||
'black'
|
||||
];
|
||||
|
||||
import { loadPyodide } from 'pyodide';
|
||||
|
|
|
|||
|
|
@ -44,6 +44,10 @@ code {
|
|||
font-family: 'InstrumentSerif', sans-serif;
|
||||
}
|
||||
|
||||
.marked a {
|
||||
@apply underline;
|
||||
}
|
||||
|
||||
math {
|
||||
margin-top: 1rem;
|
||||
}
|
||||
|
|
|
|||
|
|
@ -336,7 +336,7 @@ export const userSignOut = async () => {
|
|||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res;
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
|
|
|
|||
|
|
@ -612,6 +612,78 @@ export const generateTitle = async (
|
|||
}
|
||||
};
|
||||
|
||||
export const generateFollowUps = async (
|
||||
token: string = '',
|
||||
model: string,
|
||||
messages: string,
|
||||
chat_id?: string
|
||||
) => {
|
||||
let error = null;
|
||||
|
||||
const res = await fetch(`${WEBUI_BASE_URL}/api/v1/tasks/follow_ups/completions`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
Accept: 'application/json',
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${token}`
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model: model,
|
||||
messages: messages,
|
||||
...(chat_id && { chat_id: chat_id })
|
||||
})
|
||||
})
|
||||
.then(async (res) => {
|
||||
if (!res.ok) throw await res.json();
|
||||
return res.json();
|
||||
})
|
||||
.catch((err) => {
|
||||
console.error(err);
|
||||
if ('detail' in err) {
|
||||
error = err.detail;
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
if (error) {
|
||||
throw error;
|
||||
}
|
||||
|
||||
try {
|
||||
// Step 1: Safely extract the response string
|
||||
const response = res?.choices[0]?.message?.content ?? '';
|
||||
|
||||
// Step 2: Attempt to fix common JSON format issues like single quotes
|
||||
const sanitizedResponse = response.replace(/['‘’`]/g, '"'); // Convert single quotes to double quotes for valid JSON
|
||||
|
||||
// Step 3: Find the relevant JSON block within the response
|
||||
const jsonStartIndex = sanitizedResponse.indexOf('{');
|
||||
const jsonEndIndex = sanitizedResponse.lastIndexOf('}');
|
||||
|
||||
// Step 4: Check if we found a valid JSON block (with both `{` and `}`)
|
||||
if (jsonStartIndex !== -1 && jsonEndIndex !== -1) {
|
||||
const jsonResponse = sanitizedResponse.substring(jsonStartIndex, jsonEndIndex + 1);
|
||||
|
||||
// Step 5: Parse the JSON block
|
||||
const parsed = JSON.parse(jsonResponse);
|
||||
|
||||
// Step 6: If there's a "follow_ups" key, return the follow_ups array; otherwise, return an empty array
|
||||
if (parsed && parsed.follow_ups) {
|
||||
return Array.isArray(parsed.follow_ups) ? parsed.follow_ups : [];
|
||||
} else {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
// If no valid JSON block found, return an empty array
|
||||
return [];
|
||||
} catch (e) {
|
||||
// Catch and safely return empty array on any parsing errors
|
||||
console.error('Failed to parse response: ', e);
|
||||
return [];
|
||||
}
|
||||
};
|
||||
|
||||
export const generateTags = async (
|
||||
token: string = '',
|
||||
model: string,
|
||||
|
|
|
|||
|
|
@ -393,6 +393,7 @@ export const updateUserById = async (token: string, userId: string, user: UserUp
|
|||
},
|
||||
body: JSON.stringify({
|
||||
profile_image_url: user.profile_image_url,
|
||||
role: user.role,
|
||||
email: user.email,
|
||||
name: user.name,
|
||||
password: user.password !== '' ? user.password : undefined
|
||||
|
|
|
|||
|
|
@ -49,6 +49,9 @@
|
|||
let loading = false;
|
||||
|
||||
const verifyOllamaHandler = async () => {
|
||||
// remove trailing slash from url
|
||||
url = url.replace(/\/$/, '');
|
||||
|
||||
const res = await verifyOllamaConnection(localStorage.token, {
|
||||
url,
|
||||
key
|
||||
|
|
@ -62,6 +65,9 @@
|
|||
};
|
||||
|
||||
const verifyOpenAIHandler = async () => {
|
||||
// remove trailing slash from url
|
||||
url = url.replace(/\/$/, '');
|
||||
|
||||
const res = await verifyOpenAIConnection(
|
||||
localStorage.token,
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
<script>
|
||||
import { getContext, tick, onMount } from 'svelte';
|
||||
import { toast } from 'svelte-sonner';
|
||||
import { goto } from '$app/navigation';
|
||||
import { page } from '$app/stores';
|
||||
|
||||
import Leaderboard from './Evaluations/Leaderboard.svelte';
|
||||
import Feedbacks from './Evaluations/Feedbacks.svelte';
|
||||
|
||||
|
|
@ -8,7 +10,24 @@
|
|||
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
let selectedTab = 'leaderboard';
|
||||
let selectedTab;
|
||||
$: {
|
||||
const pathParts = $page.url.pathname.split('/');
|
||||
const tabFromPath = pathParts[pathParts.length - 1];
|
||||
selectedTab = ['leaderboard', 'feedbacks'].includes(tabFromPath) ? tabFromPath : 'leaderboard';
|
||||
}
|
||||
|
||||
$: if (selectedTab) {
|
||||
// scroll to selectedTab
|
||||
scrollToTab(selectedTab);
|
||||
}
|
||||
|
||||
const scrollToTab = (tabId) => {
|
||||
const tabElement = document.getElementById(tabId);
|
||||
if (tabElement) {
|
||||
tabElement.scrollIntoView({ behavior: 'smooth', block: 'nearest', inline: 'start' });
|
||||
}
|
||||
};
|
||||
|
||||
let loaded = false;
|
||||
let feedbacks = [];
|
||||
|
|
@ -27,6 +46,9 @@
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Scroll to the selected tab on mount
|
||||
scrollToTab(selectedTab);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
|
@ -37,12 +59,13 @@
|
|||
class="tabs flex flex-row overflow-x-auto gap-2.5 max-w-full lg:gap-1 lg:flex-col lg:flex-none lg:w-40 dark:text-gray-200 text-sm font-medium text-left scrollbar-none"
|
||||
>
|
||||
<button
|
||||
id="leaderboard"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg lg:flex-none flex text-right transition {selectedTab ===
|
||||
'leaderboard'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'leaderboard';
|
||||
goto('/admin/evaluations/leaderboard');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -63,12 +86,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="feedbacks"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg lg:flex-none flex text-right transition {selectedTab ===
|
||||
'feedbacks'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'feedbacks';
|
||||
goto('/admin/evaluations/feedbacks');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
|
|||
|
|
@ -1,5 +1,7 @@
|
|||
<script>
|
||||
import { getContext, tick, onMount } from 'svelte';
|
||||
import { page } from '$app/stores';
|
||||
import { goto } from '$app/navigation';
|
||||
import { toast } from 'svelte-sonner';
|
||||
|
||||
import { config } from '$lib/stores';
|
||||
|
|
@ -26,6 +28,41 @@
|
|||
|
||||
let selectedTab = 'general';
|
||||
|
||||
// Get current tab from URL pathname, default to 'general'
|
||||
$: {
|
||||
const pathParts = $page.url.pathname.split('/');
|
||||
const tabFromPath = pathParts[pathParts.length - 1];
|
||||
selectedTab = [
|
||||
'general',
|
||||
'connections',
|
||||
'models',
|
||||
'evaluations',
|
||||
'tools',
|
||||
'documents',
|
||||
'web',
|
||||
'code-execution',
|
||||
'interface',
|
||||
'audio',
|
||||
'images',
|
||||
'pipelines',
|
||||
'db'
|
||||
].includes(tabFromPath)
|
||||
? tabFromPath
|
||||
: 'general';
|
||||
}
|
||||
|
||||
$: if (selectedTab) {
|
||||
// scroll to selectedTab
|
||||
scrollToTab(selectedTab);
|
||||
}
|
||||
|
||||
const scrollToTab = (tabId) => {
|
||||
const tabElement = document.getElementById(tabId);
|
||||
if (tabElement) {
|
||||
tabElement.scrollIntoView({ behavior: 'smooth', block: 'nearest', inline: 'start' });
|
||||
}
|
||||
};
|
||||
|
||||
onMount(() => {
|
||||
const containerElement = document.getElementById('admin-settings-tabs-container');
|
||||
|
||||
|
|
@ -37,6 +74,9 @@
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Scroll to the selected tab on mount
|
||||
scrollToTab(selectedTab);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
|
@ -46,12 +86,13 @@
|
|||
class="tabs flex flex-row overflow-x-auto gap-2.5 max-w-full lg:gap-1 lg:flex-col lg:flex-none lg:w-40 dark:text-gray-200 text-sm font-medium text-left scrollbar-none"
|
||||
>
|
||||
<button
|
||||
id="general"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 lg:flex-none flex text-right transition {selectedTab ===
|
||||
'general'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'general';
|
||||
goto('/admin/settings/general');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -72,12 +113,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="connections"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'connections'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'connections';
|
||||
goto('/admin/settings/connections');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -96,12 +138,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="models"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'models'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'models';
|
||||
goto('/admin/settings/models');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -122,12 +165,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="evaluations"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'evaluations'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'evaluations';
|
||||
goto('/admin/settings/evaluations');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -137,12 +181,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="tools"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'tools'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'tools';
|
||||
goto('/admin/settings/tools');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -163,12 +208,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="documents"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'documents'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'documents';
|
||||
goto('/admin/settings/documents');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -193,12 +239,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="web"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'web'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'web';
|
||||
goto('/admin/settings/web');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -217,12 +264,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="code-execution"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'code-execution'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'code-execution';
|
||||
goto('/admin/settings/code-execution');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -243,12 +291,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="interface"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'interface'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'interface';
|
||||
goto('/admin/settings/interface');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -269,12 +318,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="audio"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'audio'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'audio';
|
||||
goto('/admin/settings/audio');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -296,12 +346,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="images"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'images'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'images';
|
||||
goto('/admin/settings/images');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -322,12 +373,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="pipelines"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'pipelines'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'pipelines';
|
||||
goto('/admin/settings/pipelines');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -352,12 +404,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="db"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'db'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'db';
|
||||
goto('/admin/settings/db');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
|
|||
|
|
@ -194,17 +194,20 @@
|
|||
await embeddingModelUpdateHandler();
|
||||
}
|
||||
|
||||
RAGConfig.ALLOWED_FILE_EXTENSIONS = (RAGConfig?.ALLOWED_FILE_EXTENSIONS ?? '')
|
||||
.split(',')
|
||||
.map((ext) => ext.trim())
|
||||
.filter((ext) => ext !== '');
|
||||
|
||||
RAGConfig.DATALAB_MARKER_LANGS = RAGConfig.DATALAB_MARKER_LANGS.split(',')
|
||||
.map((code) => code.trim())
|
||||
.filter((code) => code !== '')
|
||||
.join(', ');
|
||||
|
||||
const res = await updateRAGConfig(localStorage.token, RAGConfig);
|
||||
const res = await updateRAGConfig(localStorage.token, {
|
||||
...RAGConfig,
|
||||
ALLOWED_FILE_EXTENSIONS: RAGConfig.ALLOWED_FILE_EXTENSIONS.split(',')
|
||||
.map((ext) => ext.trim())
|
||||
.filter((ext) => ext !== ''),
|
||||
DATALAB_MARKER_LANGS: RAGConfig.DATALAB_MARKER_LANGS.split(',')
|
||||
.map((code) => code.trim())
|
||||
.filter((code) => code !== '')
|
||||
.join(', '),
|
||||
DOCLING_PICTURE_DESCRIPTION_LOCAL: JSON.parse(
|
||||
RAGConfig.DOCLING_PICTURE_DESCRIPTION_LOCAL || '{}'
|
||||
),
|
||||
DOCLING_PICTURE_DESCRIPTION_API: JSON.parse(RAGConfig.DOCLING_PICTURE_DESCRIPTION_API || '{}')
|
||||
});
|
||||
dispatch('save');
|
||||
};
|
||||
|
||||
|
|
@ -232,6 +235,18 @@
|
|||
|
||||
const config = await getRAGConfig(localStorage.token);
|
||||
config.ALLOWED_FILE_EXTENSIONS = (config?.ALLOWED_FILE_EXTENSIONS ?? []).join(', ');
|
||||
|
||||
config.DOCLING_PICTURE_DESCRIPTION_LOCAL = JSON.stringify(
|
||||
config.DOCLING_PICTURE_DESCRIPTION_LOCAL ?? {},
|
||||
null,
|
||||
2
|
||||
);
|
||||
config.DOCLING_PICTURE_DESCRIPTION_API = JSON.stringify(
|
||||
config.DOCLING_PICTURE_DESCRIPTION_API ?? {},
|
||||
null,
|
||||
2
|
||||
);
|
||||
|
||||
RAGConfig = config;
|
||||
});
|
||||
</script>
|
||||
|
|
@ -510,6 +525,71 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{#if RAGConfig.DOCLING_DO_PICTURE_DESCRIPTION}
|
||||
<div class="flex justify-between w-full mt-2">
|
||||
<div class="self-center text-xs font-medium">
|
||||
<Tooltip content={''} placement="top-start">
|
||||
{$i18n.t('Picture Description Mode')}
|
||||
</Tooltip>
|
||||
</div>
|
||||
<div class="">
|
||||
<select
|
||||
class="dark:bg-gray-900 w-fit pr-8 rounded-sm px-2 text-xs bg-transparent outline-hidden text-right"
|
||||
bind:value={RAGConfig.DOCLING_PICTURE_DESCRIPTION_MODE}
|
||||
>
|
||||
<option value="">{$i18n.t('Default')}</option>
|
||||
<option value="local">{$i18n.t('Local')}</option>
|
||||
<option value="api">{$i18n.t('API')}</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if RAGConfig.DOCLING_PICTURE_DESCRIPTION_MODE === 'local'}
|
||||
<div class="flex flex-col gap-2 mt-2">
|
||||
<div class=" flex flex-col w-full justify-between">
|
||||
<div class=" mb-1 text-xs font-medium">
|
||||
{$i18n.t('Picture Description Local Config')}
|
||||
</div>
|
||||
<div class="flex w-full items-center relative">
|
||||
<Tooltip
|
||||
content={$i18n.t(
|
||||
'Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.'
|
||||
)}
|
||||
placement="top-start"
|
||||
className="w-full"
|
||||
>
|
||||
<Textarea
|
||||
bind:value={RAGConfig.DOCLING_PICTURE_DESCRIPTION_LOCAL}
|
||||
placeholder={$i18n.t('Enter Config in JSON format')}
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{:else if RAGConfig.DOCLING_PICTURE_DESCRIPTION_MODE === 'api'}
|
||||
<div class="flex flex-col gap-2 mt-2">
|
||||
<div class=" flex flex-col w-full justify-between">
|
||||
<div class=" mb-1 text-xs font-medium">
|
||||
{$i18n.t('Picture Description API Config')}
|
||||
</div>
|
||||
<div class="flex w-full items-center relative">
|
||||
<Tooltip
|
||||
content={$i18n.t(
|
||||
'API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.'
|
||||
)}
|
||||
placement="top-start"
|
||||
className="w-full"
|
||||
>
|
||||
<Textarea
|
||||
bind:value={RAGConfig.DOCLING_PICTURE_DESCRIPTION_API}
|
||||
placeholder={$i18n.t('Enter Config in JSON format')}
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
{/if}
|
||||
{:else if RAGConfig.CONTENT_EXTRACTION_ENGINE === 'document_intelligence'}
|
||||
<div class="my-0.5 flex gap-2 pr-2">
|
||||
<input
|
||||
|
|
@ -830,12 +910,7 @@
|
|||
<div class=" mb-2.5 flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">{$i18n.t('Hybrid Search')}</div>
|
||||
<div class="flex items-center relative">
|
||||
<Switch
|
||||
bind:state={RAGConfig.ENABLE_RAG_HYBRID_SEARCH}
|
||||
on:change={() => {
|
||||
submitHandler();
|
||||
}}
|
||||
/>
|
||||
<Switch bind:state={RAGConfig.ENABLE_RAG_HYBRID_SEARCH} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -79,6 +79,7 @@
|
|||
const updateHandler = async () => {
|
||||
webhookUrl = await updateWebhookUrl(localStorage.token, webhookUrl);
|
||||
const res = await updateAdminConfig(localStorage.token, adminConfig);
|
||||
await updateLdapConfig(localStorage.token, ENABLE_LDAP);
|
||||
await updateLdapServerHandler();
|
||||
|
||||
if (res) {
|
||||
|
|
@ -311,7 +312,6 @@
|
|||
{$i18n.t('Pending User Overlay Title')}
|
||||
</div>
|
||||
<Textarea
|
||||
rows={2}
|
||||
placeholder={$i18n.t(
|
||||
'Enter a title for the pending user info overlay. Leave empty for default.'
|
||||
)}
|
||||
|
|
@ -401,12 +401,7 @@
|
|||
<div class=" font-medium">{$i18n.t('LDAP')}</div>
|
||||
|
||||
<div class="mt-1">
|
||||
<Switch
|
||||
bind:state={ENABLE_LDAP}
|
||||
on:change={async () => {
|
||||
updateLdapConfig(localStorage.token, ENABLE_LDAP);
|
||||
}}
|
||||
/>
|
||||
<Switch bind:state={ENABLE_LDAP} />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -31,6 +31,8 @@
|
|||
TASK_MODEL_EXTERNAL: '',
|
||||
ENABLE_TITLE_GENERATION: true,
|
||||
TITLE_GENERATION_PROMPT_TEMPLATE: '',
|
||||
ENABLE_FOLLOW_UP_GENERATION: true,
|
||||
FOLLOW_UP_GENERATION_PROMPT_TEMPLATE: '',
|
||||
IMAGE_PROMPT_GENERATION_PROMPT_TEMPLATE: '',
|
||||
ENABLE_AUTOCOMPLETE_GENERATION: true,
|
||||
AUTOCOMPLETE_GENERATION_INPUT_MAX_LENGTH: -1,
|
||||
|
|
@ -235,6 +237,32 @@
|
|||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="mb-2.5 flex w-full items-center justify-between">
|
||||
<div class=" self-center text-xs font-medium">
|
||||
{$i18n.t('Follow Up Generation')}
|
||||
</div>
|
||||
|
||||
<Switch bind:state={taskConfig.ENABLE_FOLLOW_UP_GENERATION} />
|
||||
</div>
|
||||
|
||||
{#if taskConfig.ENABLE_FOLLOW_UP_GENERATION}
|
||||
<div class="mb-2.5">
|
||||
<div class=" mb-1 text-xs font-medium">{$i18n.t('Follow Up Generation Prompt')}</div>
|
||||
|
||||
<Tooltip
|
||||
content={$i18n.t('Leave empty to use the default prompt, or enter a custom prompt')}
|
||||
placement="top-start"
|
||||
>
|
||||
<Textarea
|
||||
bind:value={taskConfig.FOLLOW_UP_GENERATION_PROMPT_TEMPLATE}
|
||||
placeholder={$i18n.t(
|
||||
'Leave empty to use the default prompt, or enter a custom prompt'
|
||||
)}
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="mb-2.5 flex w-full items-center justify-between">
|
||||
<div class=" self-center text-xs font-medium">
|
||||
{$i18n.t('Tags Generation')}
|
||||
|
|
|
|||
|
|
@ -45,7 +45,7 @@
|
|||
|
||||
<div slot="content">
|
||||
<DropdownMenu.Content
|
||||
class="w-full max-w-[160px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
class="w-full max-w-[170px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
sideOffset={-2}
|
||||
side="bottom"
|
||||
align="start"
|
||||
|
|
|
|||
|
|
@ -446,15 +446,54 @@
|
|||
</div>
|
||||
</div>
|
||||
{:else if webConfig.WEB_SEARCH_ENGINE === 'perplexity'}
|
||||
<div>
|
||||
<div class=" self-center text-xs font-medium mb-1">
|
||||
{$i18n.t('Perplexity API Key')}
|
||||
</div>
|
||||
<div class="mb-2.5 flex w-full flex-col">
|
||||
<div>
|
||||
<div class=" self-center text-xs font-medium mb-1">
|
||||
{$i18n.t('Perplexity API Key')}
|
||||
</div>
|
||||
|
||||
<SensitiveInput
|
||||
placeholder={$i18n.t('Enter Perplexity API Key')}
|
||||
bind:value={webConfig.PERPLEXITY_API_KEY}
|
||||
/>
|
||||
<SensitiveInput
|
||||
placeholder={$i18n.t('Enter Perplexity API Key')}
|
||||
bind:value={webConfig.PERPLEXITY_API_KEY}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mb-2.5 flex w-full flex-col">
|
||||
<div>
|
||||
<div class="self-center text-xs font-medium mb-1">
|
||||
{$i18n.t('Perplexity Model')}
|
||||
</div>
|
||||
<input
|
||||
list="perplexity-model-list"
|
||||
class="w-full rounded-lg py-2 px-4 text-sm bg-gray-50 dark:text-gray-300 dark:bg-gray-850 outline-hidden"
|
||||
bind:value={webConfig.PERPLEXITY_MODEL}
|
||||
/>
|
||||
|
||||
<datalist id="perplexity-model-list">
|
||||
<option value="sonar">Sonar</option>
|
||||
<option value="sonar-pro">Sonar Pro</option>
|
||||
<option value="sonar-reasoning">Sonar Reasoning</option>
|
||||
<option value="sonar-reasoning-pro">Sonar Reasoning Pro</option>
|
||||
<option value="sonar-deep-research">Sonar Deep Research</option>
|
||||
</datalist>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mb-2.5 flex w-full flex-col">
|
||||
<div>
|
||||
<div class=" self-center text-xs font-medium mb-1">
|
||||
{$i18n.t('Perplexity Search Context Usage')}
|
||||
</div>
|
||||
<select
|
||||
class="w-full rounded-lg py-2 px-4 text-sm bg-gray-50 dark:text-gray-300 dark:bg-gray-850 outline-hidden"
|
||||
bind:value={webConfig.PERPLEXITY_SEARCH_CONTEXT_USAGE}
|
||||
>
|
||||
<option value="low">Low</option>
|
||||
<option value="medium">Medium</option>
|
||||
<option value="high">High</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
{:else if webConfig.WEB_SEARCH_ENGINE === 'sougou'}
|
||||
<div class="mb-2.5 flex w-full flex-col">
|
||||
|
|
|
|||
|
|
@ -4,13 +4,32 @@
|
|||
|
||||
import { goto } from '$app/navigation';
|
||||
import { user } from '$lib/stores';
|
||||
import { page } from '$app/stores';
|
||||
|
||||
import UserList from './Users/UserList.svelte';
|
||||
import Groups from './Users/Groups.svelte';
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
let selectedTab = 'overview';
|
||||
let selectedTab;
|
||||
$: {
|
||||
const pathParts = $page.url.pathname.split('/');
|
||||
const tabFromPath = pathParts[pathParts.length - 1];
|
||||
selectedTab = ['overview', 'groups'].includes(tabFromPath) ? tabFromPath : 'overview';
|
||||
}
|
||||
|
||||
$: if (selectedTab) {
|
||||
// scroll to selectedTab
|
||||
scrollToTab(selectedTab);
|
||||
}
|
||||
|
||||
const scrollToTab = (tabId) => {
|
||||
const tabElement = document.getElementById(tabId);
|
||||
if (tabElement) {
|
||||
tabElement.scrollIntoView({ behavior: 'smooth', block: 'nearest', inline: 'start' });
|
||||
}
|
||||
};
|
||||
|
||||
let loaded = false;
|
||||
|
||||
onMount(async () => {
|
||||
|
|
@ -30,6 +49,9 @@
|
|||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Scroll to the selected tab on mount
|
||||
scrollToTab(selectedTab);
|
||||
});
|
||||
</script>
|
||||
|
||||
|
|
@ -39,12 +61,13 @@
|
|||
class=" flex flex-row overflow-x-auto gap-2.5 max-w-full lg:gap-1 lg:flex-col lg:flex-none lg:w-40 dark:text-gray-200 text-sm font-medium text-left scrollbar-none"
|
||||
>
|
||||
<button
|
||||
id="overview"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg lg:flex-none flex text-right transition {selectedTab ===
|
||||
'overview'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'overview';
|
||||
goto('/admin/users/overview');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
@ -63,12 +86,13 @@
|
|||
</button>
|
||||
|
||||
<button
|
||||
id="groups"
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg lg:flex-none flex text-right transition {selectedTab ===
|
||||
'groups'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={() => {
|
||||
selectedTab = 'groups';
|
||||
goto('/admin/users/groups');
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
|
|
|
|||
|
|
@ -52,27 +52,6 @@
|
|||
|
||||
let showUserChatsModal = false;
|
||||
let showEditUserModal = false;
|
||||
let showUpdateRoleModal = false;
|
||||
|
||||
const onUpdateRole = (user) => {
|
||||
if (user.role === 'user') {
|
||||
updateRoleHandler(user.id, 'admin');
|
||||
} else if (user.role === 'pending') {
|
||||
updateRoleHandler(user.id, 'user');
|
||||
} else {
|
||||
updateRoleHandler(user.id, 'pending');
|
||||
}
|
||||
};
|
||||
const updateRoleHandler = async (id, role) => {
|
||||
const res = await updateUserRole(localStorage.token, id, role).catch((error) => {
|
||||
toast.error(`${error}`);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (res) {
|
||||
getUserList();
|
||||
}
|
||||
};
|
||||
|
||||
const deleteUserHandler = async (id) => {
|
||||
const res = await deleteUserById(localStorage.token, id).catch((error) => {
|
||||
|
|
@ -133,21 +112,6 @@
|
|||
}}
|
||||
/>
|
||||
|
||||
<RoleUpdateConfirmDialog
|
||||
bind:show={showUpdateRoleModal}
|
||||
on:confirm={() => {
|
||||
onUpdateRole(selectedUser);
|
||||
}}
|
||||
message={$i18n.t(`Are you sure you want to update this user\'s role to **{{ROLE}}**?`, {
|
||||
ROLE:
|
||||
selectedUser?.role === 'user'
|
||||
? 'admin'
|
||||
: selectedUser?.role === 'pending'
|
||||
? 'user'
|
||||
: 'pending'
|
||||
})}
|
||||
/>
|
||||
|
||||
{#key selectedUser}
|
||||
<EditUserModal
|
||||
bind:show={showEditUserModal}
|
||||
|
|
@ -415,7 +379,7 @@
|
|||
class=" translate-y-0.5"
|
||||
on:click={() => {
|
||||
selectedUser = user;
|
||||
showUpdateRoleModal = true;
|
||||
showEditUserModal = !showEditUserModal;
|
||||
}}
|
||||
>
|
||||
<Badge
|
||||
|
|
|
|||
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
let _user = {
|
||||
profile_image_url: '',
|
||||
role: 'pending',
|
||||
name: '',
|
||||
email: '',
|
||||
password: ''
|
||||
|
|
@ -95,6 +96,23 @@
|
|||
|
||||
<div class=" px-5 pt-3 pb-5">
|
||||
<div class=" flex flex-col space-y-1.5">
|
||||
<div class="flex flex-col w-full">
|
||||
<div class=" mb-1 text-xs text-gray-500">{$i18n.t('Role')}</div>
|
||||
|
||||
<div class="flex-1">
|
||||
<select
|
||||
class="w-full rounded-sm text-sm bg-transparent disabled:text-gray-500 dark:disabled:text-gray-500 outline-hidden"
|
||||
bind:value={_user.role}
|
||||
disabled={_user.id == sessionUser.id}
|
||||
required
|
||||
>
|
||||
<option value="admin">{$i18n.t('Admin')}</option>
|
||||
<option value="user">{$i18n.t('User')}</option>
|
||||
<option value="pending">{$i18n.t('Pending')}</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col w-full">
|
||||
<div class=" mb-1 text-xs text-gray-500">{$i18n.t('Email')}</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -239,6 +239,8 @@
|
|||
};
|
||||
|
||||
const showMessage = async (message) => {
|
||||
await tick();
|
||||
|
||||
const _chatId = JSON.parse(JSON.stringify($chatId));
|
||||
let _messageId = JSON.parse(JSON.stringify(message.id));
|
||||
|
||||
|
|
@ -298,6 +300,12 @@
|
|||
message.content = data.content;
|
||||
} else if (type === 'chat:message:files' || type === 'files') {
|
||||
message.files = data.files;
|
||||
} else if (type === 'chat:message:follow_ups') {
|
||||
message.followUps = data.follow_ups;
|
||||
|
||||
if (autoScroll) {
|
||||
scrollToBottom('smooth');
|
||||
}
|
||||
} else if (type === 'chat:title') {
|
||||
chatTitle.set(data);
|
||||
currentChatPage.set(1);
|
||||
|
|
@ -430,6 +438,12 @@
|
|||
window.addEventListener('message', onMessageHandler);
|
||||
$socket?.on('chat-events', chatEventHandler);
|
||||
|
||||
page.subscribe((page) => {
|
||||
if (page.url.pathname === '/') {
|
||||
initNewChat();
|
||||
}
|
||||
});
|
||||
|
||||
if (!$chatId) {
|
||||
chatIdUnsubscriber = chatId.subscribe(async (value) => {
|
||||
if (!value) {
|
||||
|
|
@ -774,6 +788,7 @@
|
|||
|
||||
autoScroll = true;
|
||||
|
||||
resetInput();
|
||||
await chatId.set('');
|
||||
await chatTitle.set('');
|
||||
|
||||
|
|
@ -911,10 +926,13 @@
|
|||
}
|
||||
};
|
||||
|
||||
const scrollToBottom = async () => {
|
||||
const scrollToBottom = async (behavior = 'auto') => {
|
||||
await tick();
|
||||
if (messagesContainerElement) {
|
||||
messagesContainerElement.scrollTop = messagesContainerElement.scrollHeight;
|
||||
messagesContainerElement.scrollTo({
|
||||
top: messagesContainerElement.scrollHeight,
|
||||
behavior
|
||||
});
|
||||
}
|
||||
};
|
||||
const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
|
||||
|
|
@ -1624,9 +1642,6 @@
|
|||
params: {
|
||||
...$settings?.params,
|
||||
...params,
|
||||
|
||||
format: $settings.requestFormat ?? undefined,
|
||||
keep_alive: $settings.keepAlive ?? undefined,
|
||||
stop:
|
||||
(params?.stop ?? $settings?.params?.stop ?? undefined)
|
||||
? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
|
||||
|
|
@ -1676,19 +1691,20 @@
|
|||
chat_id: $chatId,
|
||||
id: responseMessageId,
|
||||
|
||||
...(!$temporaryChatEnabled &&
|
||||
(messages.length == 1 ||
|
||||
(messages.length == 2 &&
|
||||
messages.at(0)?.role === 'system' &&
|
||||
messages.at(1)?.role === 'user')) &&
|
||||
(selectedModels[0] === model.id || atSelectedModel !== undefined)
|
||||
? {
|
||||
background_tasks: {
|
||||
background_tasks: {
|
||||
...(!$temporaryChatEnabled &&
|
||||
(messages.length == 1 ||
|
||||
(messages.length == 2 &&
|
||||
messages.at(0)?.role === 'system' &&
|
||||
messages.at(1)?.role === 'user')) &&
|
||||
(selectedModels[0] === model.id || atSelectedModel !== undefined)
|
||||
? {
|
||||
title_generation: $settings?.title?.auto ?? true,
|
||||
tags_generation: $settings?.autoTags ?? true
|
||||
}
|
||||
}
|
||||
: {}),
|
||||
: {}),
|
||||
follow_up_generation: $settings?.autoFollowUps ?? true
|
||||
},
|
||||
|
||||
...(stream && (model.info?.meta?.capabilities?.usage ?? false)
|
||||
? {
|
||||
|
|
@ -2072,7 +2088,7 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div class=" pb-[1rem]">
|
||||
<div class=" pb-2">
|
||||
<MessageInput
|
||||
{history}
|
||||
{taskIds}
|
||||
|
|
|
|||
|
|
@ -69,10 +69,10 @@
|
|||
{#if $temporaryChatEnabled}
|
||||
<Tooltip
|
||||
content={$i18n.t('This chat won’t appear in history and your messages will not be saved.')}
|
||||
className="w-full flex justify-center mb-0.5"
|
||||
className="w-full flex justify-start mb-0.5"
|
||||
placement="top"
|
||||
>
|
||||
<div class="flex items-center gap-2 text-gray-500 font-medium text-lg my-2 w-fit">
|
||||
<div class="flex items-center gap-2 text-gray-500 font-medium text-lg mt-2 w-fit">
|
||||
<EyeSlash strokeWidth="2.5" className="size-5" />{$i18n.t('Temporary Chat')}
|
||||
</div>
|
||||
</Tooltip>
|
||||
|
|
|
|||
|
|
@ -190,7 +190,7 @@
|
|||
</div>
|
||||
|
||||
{#if selectedId}
|
||||
<hr class="dark:border-gray-800 my-1 w-full" />
|
||||
<hr class="border-gray-50 dark:border-gray-800 my-1 w-full" />
|
||||
|
||||
<div class="my-2 text-xs">
|
||||
{#if !loading}
|
||||
|
|
|
|||
|
|
@ -1,5 +1,9 @@
|
|||
<script lang="ts">
|
||||
import DOMPurify from 'dompurify';
|
||||
import { marked } from 'marked';
|
||||
|
||||
import { toast } from 'svelte-sonner';
|
||||
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import { createPicker, getAuthToken } from '$lib/utils/google-drive-picker';
|
||||
import { pickAndDownloadFile } from '$lib/utils/onedrive-file-picker';
|
||||
|
|
@ -595,7 +599,7 @@
|
|||
/>
|
||||
{:else}
|
||||
<form
|
||||
class="w-full flex gap-1.5"
|
||||
class="w-full flex flex-col gap-1.5"
|
||||
on:submit|preventDefault={() => {
|
||||
// check if selectedModels support image input
|
||||
dispatch('submit', prompt);
|
||||
|
|
@ -1520,6 +1524,14 @@
|
|||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if $config?.license_metadata?.input_footer}
|
||||
<div class=" text-xs text-gray-500 text-center line-clamp-1 marked">
|
||||
{@html DOMPurify.sanitize(marked($config?.license_metadata?.input_footer))}
|
||||
</div>
|
||||
{:else}
|
||||
<div class="mb-1" />
|
||||
{/if}
|
||||
</form>
|
||||
{/if}
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -205,8 +205,10 @@
|
|||
return;
|
||||
}
|
||||
|
||||
const mineTypes = ['audio/webm; codecs=opus', 'audio/mp4'];
|
||||
|
||||
mediaRecorder = new MediaRecorder(stream, {
|
||||
mimeType: 'audio/webm; codecs=opus'
|
||||
mimeType: mineTypes.find((type) => MediaRecorder.isTypeSupported(type))
|
||||
});
|
||||
|
||||
mediaRecorder.onstart = () => {
|
||||
|
|
|
|||
|
|
@ -28,7 +28,7 @@
|
|||
<!-- svelte-ignore a11y-media-has-caption -->
|
||||
<video
|
||||
class="w-full my-2"
|
||||
src={videoSrc}
|
||||
src={videoSrc.replaceAll('&', '&')}
|
||||
title="Video player"
|
||||
frameborder="0"
|
||||
referrerpolicy="strict-origin-when-cross-origin"
|
||||
|
|
@ -38,6 +38,20 @@
|
|||
{:else}
|
||||
{token.text}
|
||||
{/if}
|
||||
{:else if html && html.includes('<audio')}
|
||||
{@const audio = html.match(/<audio[^>]*>([\s\S]*?)<\/audio>/)}
|
||||
{@const audioSrc = audio && audio[1]}
|
||||
{#if audioSrc}
|
||||
<!-- svelte-ignore a11y-media-has-caption -->
|
||||
<audio
|
||||
class="w-full my-2"
|
||||
src={audioSrc.replaceAll('&', '&')}
|
||||
title="Audio player"
|
||||
controls
|
||||
></audio>
|
||||
{:else}
|
||||
{token.text}
|
||||
{/if}
|
||||
{:else if token.text && token.text.match(/<iframe\s+[^>]*src="https:\/\/www\.youtube\.com\/embed\/([a-zA-Z0-9_-]{11})(?:\?[^"]*)?"[^>]*><\/iframe>/)}
|
||||
{@const match = token.text.match(
|
||||
/<iframe\s+[^>]*src="https:\/\/www\.youtube\.com\/embed\/([a-zA-Z0-9_-]{11})(?:\?[^"]*)?"[^>]*><\/iframe>/
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@
|
|||
src.startsWith('/')
|
||||
? src
|
||||
: `/user.png`}
|
||||
class=" {className} object-cover rounded-full -translate-y-[1px]"
|
||||
class=" {className} object-cover rounded-full"
|
||||
alt="profile"
|
||||
draggable="false"
|
||||
/>
|
||||
|
|
|
|||
|
|
@ -137,8 +137,8 @@
|
|||
</div>
|
||||
|
||||
<div class="w-full flex justify-center">
|
||||
<div class=" relative w-fit">
|
||||
<div class="mt-1.5 w-fit flex gap-1 pb-5">
|
||||
<div class=" relative w-fit overflow-x-auto scrollbar-none">
|
||||
<div class="mt-1.5 w-fit flex gap-1 pb-2">
|
||||
<!-- 1-10 scale -->
|
||||
{#each Array.from({ length: 10 }).map((_, i) => i + 1) as rating}
|
||||
<button
|
||||
|
|
@ -156,7 +156,7 @@
|
|||
{/each}
|
||||
</div>
|
||||
|
||||
<div class="absolute bottom-0 left-0 right-0 flex justify-between text-xs">
|
||||
<div class="sticky top-0 bottom-0 left-0 right-0 flex justify-between text-xs">
|
||||
<div>
|
||||
1 - {$i18n.t('Awful')}
|
||||
</div>
|
||||
|
|
|
|||
|
|
@ -48,6 +48,9 @@
|
|||
import ContentRenderer from './ContentRenderer.svelte';
|
||||
import { KokoroWorker } from '$lib/workers/KokoroWorker';
|
||||
import FileItem from '$lib/components/common/FileItem.svelte';
|
||||
import FollowUps from './ResponseMessage/FollowUps.svelte';
|
||||
import { fade } from 'svelte/transition';
|
||||
import { flyAndScale } from '$lib/utils/transitions';
|
||||
|
||||
interface MessageType {
|
||||
id: string;
|
||||
|
|
@ -598,7 +601,7 @@
|
|||
id="message-{message.id}"
|
||||
dir={$settings.chatDirection}
|
||||
>
|
||||
<div class={`shrink-0 ltr:mr-3 rtl:ml-3`}>
|
||||
<div class={`shrink-0 ltr:mr-3 rtl:ml-3 hidden @lg:flex `}>
|
||||
<ProfileImage
|
||||
src={model?.info?.meta?.profile_image_url ??
|
||||
($i18n.language === 'dg-DG' ? `/doge.png` : `${WEBUI_BASE_URL}/static/favicon.png`)}
|
||||
|
|
@ -606,7 +609,7 @@
|
|||
/>
|
||||
</div>
|
||||
|
||||
<div class="flex-auto w-0 pl-1 relative">
|
||||
<div class="flex-auto w-0 pl-1 relative -translate-y-0.5">
|
||||
<Name>
|
||||
<Tooltip content={model?.name ?? message.model} placement="top-start">
|
||||
<span class="line-clamp-1 text-black dark:text-white">
|
||||
|
|
@ -866,12 +869,14 @@
|
|||
{#if siblings.length > 1}
|
||||
<div class="flex self-center min-w-fit" dir="ltr">
|
||||
<button
|
||||
aria-label={$i18n.t('Previous message')}
|
||||
class="self-center p-1 hover:bg-black/5 dark:hover:bg-white/5 dark:hover:text-white hover:text-black rounded-md transition"
|
||||
on:click={() => {
|
||||
showPreviousMessage(message);
|
||||
}}
|
||||
>
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
|
|
@ -937,10 +942,12 @@
|
|||
on:click={() => {
|
||||
showNextMessage(message);
|
||||
}}
|
||||
aria-label={$i18n.t('Next message')}
|
||||
>
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
aria-hidden="true"
|
||||
viewBox="0 0 24 24"
|
||||
stroke="currentColor"
|
||||
stroke-width="2.5"
|
||||
|
|
@ -961,6 +968,7 @@
|
|||
{#if $user?.role === 'user' ? ($user?.permissions?.chat?.edit ?? true) : true}
|
||||
<Tooltip content={$i18n.t('Edit')} placement="bottom">
|
||||
<button
|
||||
aria-label={$i18n.t('Edit')}
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg dark:hover:text-white hover:text-black transition"
|
||||
|
|
@ -973,6 +981,7 @@
|
|||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="2.3"
|
||||
aria-hidden="true"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
|
|
@ -989,6 +998,7 @@
|
|||
|
||||
<Tooltip content={$i18n.t('Copy')} placement="bottom">
|
||||
<button
|
||||
aria-label={$i18n.t('Copy')}
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg dark:hover:text-white hover:text-black transition copy-response-button"
|
||||
|
|
@ -999,6 +1009,7 @@
|
|||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
aria-hidden="true"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="2.3"
|
||||
stroke="currentColor"
|
||||
|
|
@ -1016,6 +1027,7 @@
|
|||
{#if $user?.role === 'admin' || ($user?.permissions?.chat?.tts ?? true)}
|
||||
<Tooltip content={$i18n.t('Read Aloud')} placement="bottom">
|
||||
<button
|
||||
aria-label={$i18n.t('Read Aloud')}
|
||||
id="speak-button-{message.id}"
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
|
|
@ -1031,6 +1043,7 @@
|
|||
class=" w-4 h-4"
|
||||
fill="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
aria-hidden="true"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
>
|
||||
<style>
|
||||
|
|
@ -1063,6 +1076,7 @@
|
|||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
aria-hidden="true"
|
||||
stroke-width="2.3"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
|
|
@ -1078,6 +1092,7 @@
|
|||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
aria-hidden="true"
|
||||
stroke-width="2.3"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
|
|
@ -1096,6 +1111,7 @@
|
|||
{#if $config?.features.enable_image_generation && ($user?.role === 'admin' || $user?.permissions?.features?.image_generation) && !readOnly}
|
||||
<Tooltip content={$i18n.t('Generate Image')} placement="bottom">
|
||||
<button
|
||||
aria-label={$i18n.t('Generate Image')}
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg dark:hover:text-white hover:text-black transition"
|
||||
|
|
@ -1107,6 +1123,7 @@
|
|||
>
|
||||
{#if generatingImage}
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
class=" w-4 h-4"
|
||||
fill="currentColor"
|
||||
viewBox="0 0 24 24"
|
||||
|
|
@ -1141,6 +1158,7 @@
|
|||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
aria-hidden="true"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="2.3"
|
||||
stroke="currentColor"
|
||||
|
|
@ -1173,6 +1191,7 @@
|
|||
placement="bottom"
|
||||
>
|
||||
<button
|
||||
aria-hidden="true"
|
||||
class=" {isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg dark:hover:text-white hover:text-black transition whitespace-pre-wrap"
|
||||
|
|
@ -1182,6 +1201,7 @@
|
|||
id="info-{message.id}"
|
||||
>
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
|
|
@ -1203,6 +1223,7 @@
|
|||
{#if !$temporaryChatEnabled && ($config?.features.enable_message_rating ?? true)}
|
||||
<Tooltip content={$i18n.t('Good Response')} placement="bottom">
|
||||
<button
|
||||
aria-label={$i18n.t('Good Response')}
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg {(
|
||||
|
|
@ -1221,6 +1242,7 @@
|
|||
}}
|
||||
>
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
stroke="currentColor"
|
||||
fill="none"
|
||||
stroke-width="2.3"
|
||||
|
|
@ -1239,6 +1261,7 @@
|
|||
|
||||
<Tooltip content={$i18n.t('Bad Response')} placement="bottom">
|
||||
<button
|
||||
aria-label={$i18n.t('Bad Response')}
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg {(
|
||||
|
|
@ -1257,6 +1280,7 @@
|
|||
}}
|
||||
>
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
stroke="currentColor"
|
||||
fill="none"
|
||||
stroke-width="2.3"
|
||||
|
|
@ -1277,6 +1301,7 @@
|
|||
{#if isLastMessage}
|
||||
<Tooltip content={$i18n.t('Continue Response')} placement="bottom">
|
||||
<button
|
||||
aria-label={$i18n.t('Continue Response')}
|
||||
type="button"
|
||||
id="continue-response-button"
|
||||
class="{isLastMessage
|
||||
|
|
@ -1287,6 +1312,7 @@
|
|||
}}
|
||||
>
|
||||
<svg
|
||||
aria-hidden="true"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
|
|
@ -1312,6 +1338,7 @@
|
|||
<Tooltip content={$i18n.t('Regenerate')} placement="bottom">
|
||||
<button
|
||||
type="button"
|
||||
aria-label={$i18n.t('Regenerate')}
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg dark:hover:text-white hover:text-black transition regenerate-response-button"
|
||||
|
|
@ -1337,6 +1364,7 @@
|
|||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="2.3"
|
||||
aria-hidden="true"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
|
|
@ -1353,6 +1381,7 @@
|
|||
<Tooltip content={$i18n.t('Delete')} placement="bottom">
|
||||
<button
|
||||
type="button"
|
||||
aria-label={$i18n.t('Delete')}
|
||||
id="delete-response-button"
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
|
|
@ -1367,6 +1396,7 @@
|
|||
viewBox="0 0 24 24"
|
||||
stroke-width="2"
|
||||
stroke="currentColor"
|
||||
aria-hidden="true"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
|
|
@ -1384,6 +1414,7 @@
|
|||
<Tooltip content={action.name} placement="bottom">
|
||||
<button
|
||||
type="button"
|
||||
aria-label={action.name}
|
||||
class="{isLastMessage
|
||||
? 'visible'
|
||||
: 'invisible group-hover:visible'} p-1.5 hover:bg-black/5 dark:hover:bg-white/5 rounded-lg dark:hover:text-white hover:text-black transition"
|
||||
|
|
@ -1425,6 +1456,17 @@
|
|||
}}
|
||||
/>
|
||||
{/if}
|
||||
|
||||
{#if isLastMessage && message.done && !readOnly && (message?.followUps ?? []).length > 0}
|
||||
<div class="mt-2.5" in:fade={{ duration: 100 }}>
|
||||
<FollowUps
|
||||
followUps={message?.followUps}
|
||||
onClick={(prompt) => {
|
||||
submitMessage(message?.id, prompt);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
{/if}
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,36 @@
|
|||
<script lang="ts">
|
||||
import ArrowTurnDownRight from '$lib/components/icons/ArrowTurnDownRight.svelte';
|
||||
import { onMount, tick, getContext } from 'svelte';
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
export let followUps: string[] = [];
|
||||
export let onClick: (followUp: string) => void = () => {};
|
||||
</script>
|
||||
|
||||
<div class="mt-4">
|
||||
<div class="text-sm font-medium">
|
||||
{$i18n.t('Follow up')}
|
||||
</div>
|
||||
|
||||
<div class="flex flex-col text-left gap-1 mt-1.5">
|
||||
{#each followUps as followUp, idx (idx)}
|
||||
<button
|
||||
class=" mr-2 py-1.5 bg-transparent text-left text-sm flex items-center gap-2 px-1.5 text-gray-500 dark:text-gray-400 hover:text-black dark:hover:text-white transition"
|
||||
on:click={() => onClick(followUp)}
|
||||
title={followUp}
|
||||
aria-label={followUp}
|
||||
>
|
||||
<ArrowTurnDownRight className="size-3.5" />
|
||||
|
||||
<div class="line-clamp-1">
|
||||
{followUp}
|
||||
</div>
|
||||
</button>
|
||||
|
||||
{#if idx < followUps.length - 1}
|
||||
<hr class="border-gray-100 dark:border-gray-850" />
|
||||
{/if}
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
|
|
@ -25,6 +25,19 @@
|
|||
toast.success($i18n.t('Default model updated'));
|
||||
};
|
||||
|
||||
const pinModelHandler = async (modelId) => {
|
||||
let pinnedModels = $settings?.pinnedModels ?? [];
|
||||
|
||||
if (pinnedModels.includes(modelId)) {
|
||||
pinnedModels = pinnedModels.filter((id) => id !== modelId);
|
||||
} else {
|
||||
pinnedModels = [...new Set([...pinnedModels, modelId])];
|
||||
}
|
||||
|
||||
settings.set({ ...$settings, pinnedModels: pinnedModels });
|
||||
await updateUserSettings(localStorage.token, { ui: $settings });
|
||||
};
|
||||
|
||||
$: if (selectedModels.length > 0 && $models.length > 0) {
|
||||
selectedModels = selectedModels.map((model) =>
|
||||
$models.map((m) => m.id).includes(model) ? model : ''
|
||||
|
|
@ -49,6 +62,7 @@
|
|||
? ($user?.permissions?.chat?.temporary ?? true) &&
|
||||
!($user?.permissions?.chat?.temporary_enforced ?? false)
|
||||
: true}
|
||||
{pinModelHandler}
|
||||
bind:value={selectedModel}
|
||||
/>
|
||||
</div>
|
||||
|
|
|
|||
250
src/lib/components/chat/ModelSelector/ModelItem.svelte
Normal file
250
src/lib/components/chat/ModelSelector/ModelItem.svelte
Normal file
|
|
@ -0,0 +1,250 @@
|
|||
<script lang="ts">
|
||||
import { marked } from 'marked';
|
||||
|
||||
import { getContext, tick } from 'svelte';
|
||||
import dayjs from '$lib/dayjs';
|
||||
|
||||
import { mobile, settings, user } from '$lib/stores';
|
||||
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
import { copyToClipboard, sanitizeResponseContent } from '$lib/utils';
|
||||
import ArrowUpTray from '$lib/components/icons/ArrowUpTray.svelte';
|
||||
import Check from '$lib/components/icons/Check.svelte';
|
||||
import ModelItemMenu from './ModelItemMenu.svelte';
|
||||
import EllipsisHorizontal from '$lib/components/icons/EllipsisHorizontal.svelte';
|
||||
import { toast } from 'svelte-sonner';
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
export let selectedModelIdx: number = -1;
|
||||
export let item: any = {};
|
||||
export let index: number = -1;
|
||||
export let value: string = '';
|
||||
|
||||
export let unloadModelHandler: (modelValue: string) => void = () => {};
|
||||
export let pinModelHandler: (modelId: string) => void = () => {};
|
||||
|
||||
export let onClick: () => void = () => {};
|
||||
|
||||
const copyLinkHandler = async (model) => {
|
||||
const baseUrl = window.location.origin;
|
||||
const res = await copyToClipboard(`${baseUrl}/?model=${encodeURIComponent(model.id)}`);
|
||||
|
||||
if (res) {
|
||||
toast.success($i18n.t('Copied link to clipboard'));
|
||||
} else {
|
||||
toast.error($i18n.t('Failed to copy link'));
|
||||
}
|
||||
};
|
||||
|
||||
let showMenu = false;
|
||||
</script>
|
||||
|
||||
<button
|
||||
aria-label="model-item"
|
||||
class="flex group/item w-full text-left font-medium line-clamp-1 select-none items-center rounded-button py-2 pl-3 pr-1.5 text-sm text-gray-700 dark:text-gray-100 outline-hidden transition-all duration-75 hover:bg-gray-100 dark:hover:bg-gray-800 rounded-lg cursor-pointer data-highlighted:bg-muted {index ===
|
||||
selectedModelIdx
|
||||
? 'bg-gray-100 dark:bg-gray-800 group-hover:bg-transparent'
|
||||
: ''}"
|
||||
data-arrow-selected={index === selectedModelIdx}
|
||||
data-value={item.value}
|
||||
on:click={() => {
|
||||
onClick();
|
||||
}}
|
||||
>
|
||||
<div class="flex flex-col flex-1 gap-1.5">
|
||||
{#if (item?.model?.tags ?? []).length > 0}
|
||||
<div
|
||||
class="flex gap-0.5 self-center items-start h-full w-full translate-y-[0.5px] overflow-x-auto scrollbar-none"
|
||||
>
|
||||
{#each item.model?.tags.sort((a, b) => a.name.localeCompare(b.name)) as tag}
|
||||
<Tooltip content={tag.name} className="flex-shrink-0">
|
||||
<div
|
||||
class=" text-xs font-bold px-1 rounded-sm uppercase bg-gray-500/20 text-gray-700 dark:text-gray-200"
|
||||
>
|
||||
{tag.name}
|
||||
</div>
|
||||
</Tooltip>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="flex items-center min-w-fit">
|
||||
<Tooltip content={$user?.role === 'admin' ? (item?.value ?? '') : ''} placement="top-start">
|
||||
<img
|
||||
src={item.model?.info?.meta?.profile_image_url ?? '/static/favicon.png'}
|
||||
alt="Model"
|
||||
class="rounded-full size-5 flex items-center"
|
||||
/>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center">
|
||||
<Tooltip content={`${item.label} (${item.value})`} placement="top-start">
|
||||
<div class="line-clamp-1">
|
||||
{item.label}
|
||||
</div>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<div class=" shrink-0 flex items-center gap-2">
|
||||
{#if item.model.owned_by === 'ollama'}
|
||||
{#if (item.model.ollama?.details?.parameter_size ?? '') !== ''}
|
||||
<div class="flex items-center translate-y-[0.5px]">
|
||||
<Tooltip
|
||||
content={`${
|
||||
item.model.ollama?.details?.quantization_level
|
||||
? item.model.ollama?.details?.quantization_level + ' '
|
||||
: ''
|
||||
}${
|
||||
item.model.ollama?.size
|
||||
? `(${(item.model.ollama?.size / 1024 ** 3).toFixed(1)}GB)`
|
||||
: ''
|
||||
}`}
|
||||
className="self-end"
|
||||
>
|
||||
<span class=" text-xs font-medium text-gray-600 dark:text-gray-400 line-clamp-1"
|
||||
>{item.model.ollama?.details?.parameter_size ?? ''}</span
|
||||
>
|
||||
</Tooltip>
|
||||
</div>
|
||||
{/if}
|
||||
{#if item.model.ollama?.expires_at && new Date(item.model.ollama?.expires_at * 1000) > new Date()}
|
||||
<div class="flex items-center translate-y-[0.5px] px-0.5">
|
||||
<Tooltip
|
||||
content={`${$i18n.t('Unloads {{FROM_NOW}}', {
|
||||
FROM_NOW: dayjs(item.model.ollama?.expires_at * 1000).fromNow()
|
||||
})}`}
|
||||
className="self-end"
|
||||
>
|
||||
<div class=" flex items-center">
|
||||
<span class="relative flex size-2">
|
||||
<span
|
||||
class="animate-ping absolute inline-flex h-full w-full rounded-full bg-green-400 opacity-75"
|
||||
/>
|
||||
<span class="relative inline-flex rounded-full size-2 bg-green-500" />
|
||||
</span>
|
||||
</div>
|
||||
</Tooltip>
|
||||
</div>
|
||||
{/if}
|
||||
{/if}
|
||||
|
||||
<!-- {JSON.stringify(item.info)} -->
|
||||
|
||||
{#if item.model?.direct}
|
||||
<Tooltip content={`${$i18n.t('Direct')}`}>
|
||||
<div class="translate-y-[1px]">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="size-3"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M2 2.75A.75.75 0 0 1 2.75 2C8.963 2 14 7.037 14 13.25a.75.75 0 0 1-1.5 0c0-5.385-4.365-9.75-9.75-9.75A.75.75 0 0 1 2 2.75Zm0 4.5a.75.75 0 0 1 .75-.75 6.75 6.75 0 0 1 6.75 6.75.75.75 0 0 1-1.5 0C8 10.35 5.65 8 2.75 8A.75.75 0 0 1 2 7.25ZM3.5 11a1.5 1.5 0 1 0 0 3 1.5 1.5 0 0 0 0-3Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{:else if item.model.connection_type === 'external'}
|
||||
<Tooltip content={`${$i18n.t('External')}`}>
|
||||
<div class="translate-y-[1px]">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="size-3"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M8.914 6.025a.75.75 0 0 1 1.06 0 3.5 3.5 0 0 1 0 4.95l-2 2a3.5 3.5 0 0 1-5.396-4.402.75.75 0 0 1 1.251.827 2 2 0 0 0 3.085 2.514l2-2a2 2 0 0 0 0-2.828.75.75 0 0 1 0-1.06Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M7.086 9.975a.75.75 0 0 1-1.06 0 3.5 3.5 0 0 1 0-4.95l2-2a3.5 3.5 0 0 1 5.396 4.402.75.75 0 0 1-1.251-.827 2 2 0 0 0-3.085-2.514l-2 2a2 2 0 0 0 0 2.828.75.75 0 0 1 0 1.06Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
|
||||
{#if item.model?.info?.meta?.description}
|
||||
<Tooltip
|
||||
content={`${marked.parse(
|
||||
sanitizeResponseContent(item.model?.info?.meta?.description).replaceAll('\n', '<br>')
|
||||
)}`}
|
||||
>
|
||||
<div class=" translate-y-[1px]">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="1.5"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="m11.25 11.25.041-.02a.75.75 0 0 1 1.063.852l-.708 2.836a.75.75 0 0 0 1.063.853l.041-.021M21 12a9 9 0 1 1-18 0 9 9 0 0 1 18 0Zm-9-3.75h.008v.008H12V8.25Z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="ml-auto pl-2 pr-1 flex items-center gap-1.5 shrink-0">
|
||||
{#if $user?.role === 'admin' && item.model.owned_by === 'ollama' && item.model.ollama?.expires_at && new Date(item.model.ollama?.expires_at * 1000) > new Date()}
|
||||
<Tooltip
|
||||
content={`${$i18n.t('Eject')}`}
|
||||
className="flex-shrink-0 group-hover/item:opacity-100 opacity-0 "
|
||||
>
|
||||
<button
|
||||
class="flex"
|
||||
on:click={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
unloadModelHandler(item.value);
|
||||
}}
|
||||
>
|
||||
<ArrowUpTray className="size-3" />
|
||||
</button>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
|
||||
<ModelItemMenu
|
||||
bind:show={showMenu}
|
||||
model={item.model}
|
||||
{pinModelHandler}
|
||||
copyLinkHandler={() => {
|
||||
copyLinkHandler(item.model);
|
||||
}}
|
||||
>
|
||||
<button
|
||||
class="flex"
|
||||
on:click={(e) => {
|
||||
e.preventDefault();
|
||||
e.stopPropagation();
|
||||
showMenu = !showMenu;
|
||||
}}
|
||||
>
|
||||
<EllipsisHorizontal />
|
||||
</button>
|
||||
</ModelItemMenu>
|
||||
|
||||
{#if value === item.value}
|
||||
<div>
|
||||
<Check className="size-3" />
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</button>
|
||||
90
src/lib/components/chat/ModelSelector/ModelItemMenu.svelte
Normal file
90
src/lib/components/chat/ModelSelector/ModelItemMenu.svelte
Normal file
|
|
@ -0,0 +1,90 @@
|
|||
<script lang="ts">
|
||||
import { DropdownMenu } from 'bits-ui';
|
||||
import { flyAndScale } from '$lib/utils/transitions';
|
||||
|
||||
import { getContext } from 'svelte';
|
||||
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
import Link from '$lib/components/icons/Link.svelte';
|
||||
import Eye from '$lib/components/icons/Eye.svelte';
|
||||
import EyeSlash from '$lib/components/icons/EyeSlash.svelte';
|
||||
import { settings } from '$lib/stores';
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
export let show = false;
|
||||
export let model;
|
||||
|
||||
export let pinModelHandler: (modelId: string) => void = () => {};
|
||||
export let copyLinkHandler: Function = () => {};
|
||||
|
||||
export let onClose: Function = () => {};
|
||||
</script>
|
||||
|
||||
<DropdownMenu.Root
|
||||
bind:open={show}
|
||||
closeFocus={false}
|
||||
onOpenChange={(state) => {
|
||||
if (state === false) {
|
||||
onClose();
|
||||
}
|
||||
}}
|
||||
typeahead={false}
|
||||
>
|
||||
<DropdownMenu.Trigger>
|
||||
<Tooltip content={$i18n.t('More')} className=" group-hover/item:opacity-100 opacity-0">
|
||||
<slot />
|
||||
</Tooltip>
|
||||
</DropdownMenu.Trigger>
|
||||
|
||||
<DropdownMenu.Content
|
||||
strategy="fixed"
|
||||
class="w-full max-w-[180px] text-sm rounded-xl px-1 py-1.5 z-[9999999] bg-white dark:bg-gray-850 dark:text-white shadow-lg"
|
||||
sideOffset={-2}
|
||||
side="bottom"
|
||||
align="end"
|
||||
transition={flyAndScale}
|
||||
>
|
||||
<button
|
||||
type="button"
|
||||
class="flex rounded-md py-1.5 px-3 w-full hover:bg-gray-50 dark:hover:bg-gray-800 transition items-center gap-2"
|
||||
on:click={(e) => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
|
||||
pinModelHandler(model?.id);
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
{#if ($settings?.pinnedModels ?? []).includes(model?.id)}
|
||||
<EyeSlash />
|
||||
{:else}
|
||||
<Eye />
|
||||
{/if}
|
||||
|
||||
<div class="flex items-center">
|
||||
{#if ($settings?.pinnedModels ?? []).includes(model?.id)}
|
||||
{$i18n.t('Hide from Sidebar')}
|
||||
{:else}
|
||||
{$i18n.t('Keep in Sidebar')}
|
||||
{/if}
|
||||
</div>
|
||||
</button>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
class="flex rounded-md py-1.5 px-3 w-full hover:bg-gray-50 dark:hover:bg-gray-800 transition items-center gap-2"
|
||||
on:click={(e) => {
|
||||
e.stopPropagation();
|
||||
e.preventDefault();
|
||||
|
||||
copyLinkHandler();
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
<Link />
|
||||
|
||||
<div class="flex items-center">{$i18n.t('Copy Link')}</div>
|
||||
</button>
|
||||
</DropdownMenu.Content>
|
||||
</DropdownMenu.Root>
|
||||
|
|
@ -3,12 +3,13 @@
|
|||
import { marked } from 'marked';
|
||||
import Fuse from 'fuse.js';
|
||||
|
||||
import dayjs from '$lib/dayjs';
|
||||
import relativeTime from 'dayjs/plugin/relativeTime';
|
||||
dayjs.extend(relativeTime);
|
||||
|
||||
import { flyAndScale } from '$lib/utils/transitions';
|
||||
import { createEventDispatcher, onMount, getContext, tick } from 'svelte';
|
||||
|
||||
import ChevronDown from '$lib/components/icons/ChevronDown.svelte';
|
||||
import Check from '$lib/components/icons/Check.svelte';
|
||||
import Search from '$lib/components/icons/Search.svelte';
|
||||
import { goto } from '$app/navigation';
|
||||
|
||||
import { deleteModel, getOllamaVersion, pullModel, unloadModel } from '$lib/apis/ollama';
|
||||
|
||||
|
|
@ -25,14 +26,14 @@
|
|||
import { capitalizeFirstLetter, sanitizeResponseContent, splitStream } from '$lib/utils';
|
||||
import { getModels } from '$lib/apis';
|
||||
|
||||
import ChevronDown from '$lib/components/icons/ChevronDown.svelte';
|
||||
import Check from '$lib/components/icons/Check.svelte';
|
||||
import Search from '$lib/components/icons/Search.svelte';
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
import Switch from '$lib/components/common/Switch.svelte';
|
||||
import ChatBubbleOval from '$lib/components/icons/ChatBubbleOval.svelte';
|
||||
import { goto } from '$app/navigation';
|
||||
import dayjs from '$lib/dayjs';
|
||||
import relativeTime from 'dayjs/plugin/relativeTime';
|
||||
import ArrowUpTray from '$lib/components/icons/ArrowUpTray.svelte';
|
||||
dayjs.extend(relativeTime);
|
||||
|
||||
import ModelItem from './ModelItem.svelte';
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
const dispatch = createEventDispatcher();
|
||||
|
|
@ -56,6 +57,8 @@
|
|||
export let className = 'w-[32rem]';
|
||||
export let triggerClassName = 'text-lg';
|
||||
|
||||
export let pinModelHandler: (modelId: string) => void = () => {};
|
||||
|
||||
let tagsContainerElement;
|
||||
|
||||
let show = false;
|
||||
|
|
@ -407,10 +410,10 @@
|
|||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="px-3 max-h-64 overflow-y-auto scrollbar-hidden group relative">
|
||||
<div class="px-3">
|
||||
{#if tags && items.filter((item) => !(item.model?.info?.meta?.hidden ?? false)).length > 0}
|
||||
<div
|
||||
class=" flex w-full sticky top-0 z-10 bg-white dark:bg-gray-850 overflow-x-auto scrollbar-none"
|
||||
class=" flex w-full bg-white dark:bg-gray-850 overflow-x-auto scrollbar-none"
|
||||
on:wheel={(e) => {
|
||||
if (e.deltaY !== 0) {
|
||||
e.preventDefault();
|
||||
|
|
@ -492,212 +495,24 @@
|
|||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class="px-3 max-h-64 overflow-y-auto group relative">
|
||||
{#each filteredItems as item, index}
|
||||
<button
|
||||
aria-label="model-item"
|
||||
class="flex w-full text-left font-medium line-clamp-1 select-none items-center rounded-button py-2 pl-3 pr-1.5 text-sm text-gray-700 dark:text-gray-100 outline-hidden transition-all duration-75 hover:bg-gray-100 dark:hover:bg-gray-800 rounded-lg cursor-pointer data-highlighted:bg-muted {index ===
|
||||
selectedModelIdx
|
||||
? 'bg-gray-100 dark:bg-gray-800 group-hover:bg-transparent'
|
||||
: ''}"
|
||||
data-arrow-selected={index === selectedModelIdx}
|
||||
data-value={item.value}
|
||||
on:click={() => {
|
||||
<ModelItem
|
||||
{selectedModelIdx}
|
||||
{item}
|
||||
{index}
|
||||
{value}
|
||||
{pinModelHandler}
|
||||
{unloadModelHandler}
|
||||
onClick={() => {
|
||||
value = item.value;
|
||||
selectedModelIdx = index;
|
||||
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
<div class="flex flex-col">
|
||||
{#if $mobile && (item?.model?.tags ?? []).length > 0}
|
||||
<div class="flex gap-0.5 self-start h-full mb-1.5 -translate-x-1">
|
||||
{#each item.model?.tags.sort((a, b) => a.name.localeCompare(b.name)) as tag}
|
||||
<div
|
||||
class=" text-xs font-bold px-1 rounded-sm uppercase line-clamp-1 bg-gray-500/20 text-gray-700 dark:text-gray-200"
|
||||
>
|
||||
{tag.name}
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
<div class="flex items-center gap-2">
|
||||
<div class="flex items-center min-w-fit">
|
||||
<div class="line-clamp-1">
|
||||
<div class="flex items-center min-w-fit">
|
||||
<Tooltip
|
||||
content={$user?.role === 'admin' ? (item?.value ?? '') : ''}
|
||||
placement="top-start"
|
||||
>
|
||||
<img
|
||||
src={item.model?.info?.meta?.profile_image_url ?? '/static/favicon.png'}
|
||||
alt="Model"
|
||||
class="rounded-full size-5 flex items-center mr-2"
|
||||
/>
|
||||
|
||||
<div class="flex items-center line-clamp-1">
|
||||
<div class="line-clamp-1">
|
||||
{item.label}
|
||||
</div>
|
||||
</div>
|
||||
</Tooltip>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{#if item.model.owned_by === 'ollama'}
|
||||
{#if (item.model.ollama?.details?.parameter_size ?? '') !== ''}
|
||||
<div class="flex items-center translate-y-[0.5px]">
|
||||
<Tooltip
|
||||
content={`${
|
||||
item.model.ollama?.details?.quantization_level
|
||||
? item.model.ollama?.details?.quantization_level + ' '
|
||||
: ''
|
||||
}${
|
||||
item.model.ollama?.size
|
||||
? `(${(item.model.ollama?.size / 1024 ** 3).toFixed(1)}GB)`
|
||||
: ''
|
||||
}`}
|
||||
className="self-end"
|
||||
>
|
||||
<span
|
||||
class=" text-xs font-medium text-gray-600 dark:text-gray-400 line-clamp-1"
|
||||
>{item.model.ollama?.details?.parameter_size ?? ''}</span
|
||||
>
|
||||
</Tooltip>
|
||||
</div>
|
||||
{/if}
|
||||
{#if item.model.ollama?.expires_at && new Date(item.model.ollama?.expires_at * 1000) > new Date()}
|
||||
<div class="flex items-center translate-y-[0.5px] px-0.5">
|
||||
<Tooltip
|
||||
content={`${$i18n.t('Unloads {{FROM_NOW}}', {
|
||||
FROM_NOW: dayjs(item.model.ollama?.expires_at * 1000).fromNow()
|
||||
})}`}
|
||||
className="self-end"
|
||||
>
|
||||
<div class=" flex items-center">
|
||||
<span class="relative flex size-2">
|
||||
<span
|
||||
class="animate-ping absolute inline-flex h-full w-full rounded-full bg-green-400 opacity-75"
|
||||
/>
|
||||
<span class="relative inline-flex rounded-full size-2 bg-green-500" />
|
||||
</span>
|
||||
</div>
|
||||
</Tooltip>
|
||||
</div>
|
||||
{/if}
|
||||
{/if}
|
||||
|
||||
<!-- {JSON.stringify(item.info)} -->
|
||||
|
||||
{#if item.model?.direct}
|
||||
<Tooltip content={`${$i18n.t('Direct')}`}>
|
||||
<div class="translate-y-[1px]">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="size-3"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M2 2.75A.75.75 0 0 1 2.75 2C8.963 2 14 7.037 14 13.25a.75.75 0 0 1-1.5 0c0-5.385-4.365-9.75-9.75-9.75A.75.75 0 0 1 2 2.75Zm0 4.5a.75.75 0 0 1 .75-.75 6.75 6.75 0 0 1 6.75 6.75.75.75 0 0 1-1.5 0C8 10.35 5.65 8 2.75 8A.75.75 0 0 1 2 7.25ZM3.5 11a1.5 1.5 0 1 0 0 3 1.5 1.5 0 0 0 0-3Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{:else if item.model.connection_type === 'external'}
|
||||
<Tooltip content={`${$i18n.t('External')}`}>
|
||||
<div class="translate-y-[1px]">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 16 16"
|
||||
fill="currentColor"
|
||||
class="size-3"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M8.914 6.025a.75.75 0 0 1 1.06 0 3.5 3.5 0 0 1 0 4.95l-2 2a3.5 3.5 0 0 1-5.396-4.402.75.75 0 0 1 1.251.827 2 2 0 0 0 3.085 2.514l2-2a2 2 0 0 0 0-2.828.75.75 0 0 1 0-1.06Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M7.086 9.975a.75.75 0 0 1-1.06 0 3.5 3.5 0 0 1 0-4.95l2-2a3.5 3.5 0 0 1 5.396 4.402.75.75 0 0 1-1.251-.827 2 2 0 0 0-3.085-2.514l-2 2a2 2 0 0 0 0 2.828.75.75 0 0 1 0 1.06Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
|
||||
{#if item.model?.info?.meta?.description}
|
||||
<Tooltip
|
||||
content={`${marked.parse(
|
||||
sanitizeResponseContent(item.model?.info?.meta?.description).replaceAll(
|
||||
'\n',
|
||||
'<br>'
|
||||
)
|
||||
)}`}
|
||||
>
|
||||
<div class=" translate-y-[1px]">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
fill="none"
|
||||
viewBox="0 0 24 24"
|
||||
stroke-width="1.5"
|
||||
stroke="currentColor"
|
||||
class="w-4 h-4"
|
||||
>
|
||||
<path
|
||||
stroke-linecap="round"
|
||||
stroke-linejoin="round"
|
||||
d="m11.25 11.25.041-.02a.75.75 0 0 1 1.063.852l-.708 2.836a.75.75 0 0 0 1.063.853l.041-.021M21 12a9 9 0 1 1-18 0 9 9 0 0 1 18 0Zm-9-3.75h.008v.008H12V8.25Z"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
|
||||
{#if !$mobile && (item?.model?.tags ?? []).length > 0}
|
||||
<div
|
||||
class="flex gap-0.5 self-center items-center h-full translate-y-[0.5px] overflow-x-auto scrollbar-none"
|
||||
>
|
||||
{#each item.model?.tags.sort((a, b) => a.name.localeCompare(b.name)) as tag}
|
||||
<Tooltip content={tag.name} className="flex-shrink-0">
|
||||
<div
|
||||
class=" text-xs font-bold px-1 rounded-sm uppercase bg-gray-500/20 text-gray-700 dark:text-gray-200"
|
||||
>
|
||||
{tag.name}
|
||||
</div>
|
||||
</Tooltip>
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="ml-auto pl-2 pr-1 flex gap-1.5 items-center">
|
||||
{#if $user?.role === 'admin' && item.model.owned_by === 'ollama' && item.model.ollama?.expires_at && new Date(item.model.ollama?.expires_at * 1000) > new Date()}
|
||||
<Tooltip content={`${$i18n.t('Eject')}`} className="flex-shrink-0">
|
||||
<button
|
||||
class="flex"
|
||||
on:click={() => {
|
||||
unloadModelHandler(item.value);
|
||||
}}
|
||||
>
|
||||
<ArrowUpTray className="size-3" />
|
||||
</button>
|
||||
</Tooltip>
|
||||
{/if}
|
||||
|
||||
{#if value === item.value}
|
||||
<div>
|
||||
<Check className="size-3" />
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
</button>
|
||||
/>
|
||||
{:else}
|
||||
<div class="">
|
||||
<div class="block px-3 py-2 text-sm text-gray-700 dark:text-gray-100">
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
<script lang="ts">
|
||||
import Switch from '$lib/components/common/Switch.svelte';
|
||||
import Textarea from '$lib/components/common/Textarea.svelte';
|
||||
import Tooltip from '$lib/components/common/Tooltip.svelte';
|
||||
import Plus from '$lib/components/icons/Plus.svelte';
|
||||
import { getContext } from 'svelte';
|
||||
|
|
@ -34,6 +35,9 @@
|
|||
repeat_penalty: null,
|
||||
use_mmap: null,
|
||||
use_mlock: null,
|
||||
think: null,
|
||||
format: null,
|
||||
keep_alive: null,
|
||||
num_keep: null,
|
||||
num_ctx: null,
|
||||
num_batch: null,
|
||||
|
|
@ -1092,6 +1096,74 @@
|
|||
</div>
|
||||
{/if}
|
||||
|
||||
<div class=" py-0.5 w-full justify-between">
|
||||
<Tooltip
|
||||
content={$i18n.t(
|
||||
'This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.'
|
||||
)}
|
||||
placement="top-start"
|
||||
className="inline-tooltip"
|
||||
>
|
||||
<div class=" py-0.5 flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">
|
||||
{'think'} ({$i18n.t('Ollama')})
|
||||
</div>
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded-sm transition"
|
||||
on:click={() => {
|
||||
params.think = (params?.think ?? null) === null ? true : params.think ? false : null;
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{#if params.think === true}
|
||||
<span class="ml-2 self-center">{$i18n.t('On')}</span>
|
||||
{:else if params.think === false}
|
||||
<span class="ml-2 self-center">{$i18n.t('Off')}</span>
|
||||
{:else}
|
||||
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
</div>
|
||||
|
||||
<div class=" py-0.5 w-full justify-between">
|
||||
<Tooltip
|
||||
content={$i18n.t('The format to return a response in. Format can be json or a JSON schema.')}
|
||||
placement="top-start"
|
||||
className="inline-tooltip"
|
||||
>
|
||||
<div class=" py-0.5 flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">
|
||||
{'format'} ({$i18n.t('Ollama')})
|
||||
</div>
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded-sm transition"
|
||||
on:click={() => {
|
||||
params.format = (params?.format ?? null) === null ? 'json' : null;
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{#if (params?.format ?? null) === null}
|
||||
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
|
||||
{:else}
|
||||
<span class="ml-2 self-center">{$i18n.t('JSON')}</span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
|
||||
{#if (params?.format ?? null) !== null}
|
||||
<div class="flex mt-0.5 space-x-2">
|
||||
<Textarea
|
||||
className="w-full text-sm bg-transparent outline-hidden"
|
||||
placeholder={$i18n.t('e.g. "json" or a JSON schema')}
|
||||
bind:value={params.format}
|
||||
/>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div class=" py-0.5 w-full justify-between">
|
||||
<Tooltip
|
||||
content={$i18n.t(
|
||||
|
|
@ -1368,6 +1440,46 @@
|
|||
{/if}
|
||||
</div>
|
||||
|
||||
<div class=" py-0.5 w-full justify-between">
|
||||
<Tooltip
|
||||
content={$i18n.t(
|
||||
'This option controls how long the model will stay loaded into memory following the request (default: 5m)'
|
||||
)}
|
||||
placement="top-start"
|
||||
className="inline-tooltip"
|
||||
>
|
||||
<div class=" py-0.5 flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">
|
||||
{'keep_alive'} ({$i18n.t('Ollama')})
|
||||
</div>
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded-sm transition"
|
||||
on:click={() => {
|
||||
params.keep_alive = (params?.keep_alive ?? null) === null ? '5m' : null;
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{#if (params?.keep_alive ?? null) === null}
|
||||
<span class="ml-2 self-center">{$i18n.t('Default')}</span>
|
||||
{:else}
|
||||
<span class="ml-2 self-center">{$i18n.t('Custom')}</span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
</Tooltip>
|
||||
|
||||
{#if (params?.keep_alive ?? null) !== null}
|
||||
<div class="flex mt-0.5 space-x-2">
|
||||
<input
|
||||
class="w-full text-sm bg-transparent outline-hidden"
|
||||
type="text"
|
||||
placeholder={$i18n.t("e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.")}
|
||||
bind:value={params.keep_alive}
|
||||
/>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
{#if custom && admin}
|
||||
<div class="flex flex-col justify-center">
|
||||
{#each Object.keys(params?.custom_params ?? {}) as key}
|
||||
|
|
|
|||
|
|
@ -10,12 +10,11 @@
|
|||
|
||||
import AdvancedParams from './Advanced/AdvancedParams.svelte';
|
||||
import Textarea from '$lib/components/common/Textarea.svelte';
|
||||
|
||||
export let saveSettings: Function;
|
||||
export let getModels: Function;
|
||||
|
||||
// General
|
||||
let themes = ['dark', 'light', 'rose-pine dark', 'rose-pine-dawn light', 'oled-dark'];
|
||||
let themes = ['dark', 'light', 'oled-dark'];
|
||||
let selectedTheme = 'system';
|
||||
|
||||
let languages: Awaited<ReturnType<typeof getLanguages>> = [];
|
||||
|
|
@ -40,10 +39,6 @@
|
|||
}
|
||||
};
|
||||
|
||||
// Advanced
|
||||
let requestFormat = null;
|
||||
let keepAlive: string | null = null;
|
||||
|
||||
let params = {
|
||||
// Advanced
|
||||
stream_response: null,
|
||||
|
|
@ -71,37 +66,7 @@
|
|||
num_gpu: null
|
||||
};
|
||||
|
||||
const validateJSON = (json) => {
|
||||
try {
|
||||
const obj = JSON.parse(json);
|
||||
|
||||
if (obj && typeof obj === 'object') {
|
||||
return true;
|
||||
}
|
||||
} catch (e) {}
|
||||
return false;
|
||||
};
|
||||
|
||||
const toggleRequestFormat = async () => {
|
||||
if (requestFormat === null) {
|
||||
requestFormat = 'json';
|
||||
} else {
|
||||
requestFormat = null;
|
||||
}
|
||||
|
||||
saveSettings({ requestFormat: requestFormat !== null ? requestFormat : undefined });
|
||||
};
|
||||
|
||||
const saveHandler = async () => {
|
||||
if (requestFormat !== null && requestFormat !== 'json') {
|
||||
if (validateJSON(requestFormat) === false) {
|
||||
toast.error($i18n.t('Invalid JSON schema'));
|
||||
return;
|
||||
} else {
|
||||
requestFormat = JSON.parse(requestFormat);
|
||||
}
|
||||
}
|
||||
|
||||
saveSettings({
|
||||
system: system !== '' ? system : undefined,
|
||||
params: {
|
||||
|
|
@ -130,15 +95,12 @@
|
|||
use_mmap: params.use_mmap !== null ? params.use_mmap : undefined,
|
||||
use_mlock: params.use_mlock !== null ? params.use_mlock : undefined,
|
||||
num_thread: params.num_thread !== null ? params.num_thread : undefined,
|
||||
num_gpu: params.num_gpu !== null ? params.num_gpu : undefined
|
||||
},
|
||||
keepAlive: keepAlive ? (isNaN(keepAlive) ? keepAlive : parseInt(keepAlive)) : undefined,
|
||||
requestFormat: requestFormat !== null ? requestFormat : undefined
|
||||
num_gpu: params.num_gpu !== null ? params.num_gpu : undefined,
|
||||
keep_alive: params.keep_alive !== null ? params.keep_alive : undefined,
|
||||
format: params.format !== null ? params.format : undefined
|
||||
}
|
||||
});
|
||||
dispatch('save');
|
||||
|
||||
requestFormat =
|
||||
typeof requestFormat === 'object' ? JSON.stringify(requestFormat, null, 2) : requestFormat;
|
||||
};
|
||||
|
||||
onMount(async () => {
|
||||
|
|
@ -149,14 +111,6 @@
|
|||
notificationEnabled = $settings.notificationEnabled ?? false;
|
||||
system = $settings.system ?? '';
|
||||
|
||||
requestFormat = $settings.requestFormat ?? null;
|
||||
if (requestFormat !== null && requestFormat !== 'json') {
|
||||
requestFormat =
|
||||
typeof requestFormat === 'object' ? JSON.stringify(requestFormat, null, 2) : requestFormat;
|
||||
}
|
||||
|
||||
keepAlive = $settings.keepAlive ?? null;
|
||||
|
||||
params = { ...params, ...$settings.params };
|
||||
params.stop = $settings?.params?.stop ? ($settings?.params?.stop ?? []).join(',') : null;
|
||||
});
|
||||
|
|
@ -335,77 +289,6 @@
|
|||
|
||||
{#if showAdvanced}
|
||||
<AdvancedParams admin={$user?.role === 'admin'} bind:params />
|
||||
<hr class=" border-gray-100 dark:border-gray-850" />
|
||||
|
||||
<div class=" w-full justify-between">
|
||||
<div class="flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">{$i18n.t('Keep Alive')}</div>
|
||||
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded-sm transition"
|
||||
type="button"
|
||||
on:click={() => {
|
||||
keepAlive = keepAlive === null ? '5m' : null;
|
||||
}}
|
||||
>
|
||||
{#if keepAlive === null}
|
||||
<span class="ml-2 self-center"> {$i18n.t('Default')} </span>
|
||||
{:else}
|
||||
<span class="ml-2 self-center"> {$i18n.t('Custom')} </span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{#if keepAlive !== null}
|
||||
<div class="flex mt-1 space-x-2">
|
||||
<input
|
||||
class="w-full text-sm dark:text-gray-300 dark:bg-gray-850 outline-hidden"
|
||||
type="text"
|
||||
placeholder={$i18n.t("e.g. '30s','10m'. Valid time units are 's', 'm', 'h'.")}
|
||||
bind:value={keepAlive}
|
||||
/>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div class=" flex w-full justify-between">
|
||||
<div class=" self-center text-xs font-medium">{$i18n.t('Request Mode')}</div>
|
||||
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded-sm transition"
|
||||
on:click={() => {
|
||||
toggleRequestFormat();
|
||||
}}
|
||||
>
|
||||
{#if requestFormat === null}
|
||||
<span class="ml-2 self-center"> {$i18n.t('Default')} </span>
|
||||
{:else}
|
||||
<!-- <svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 20 20"
|
||||
fill="currentColor"
|
||||
class="w-4 h-4 self-center"
|
||||
>
|
||||
<path
|
||||
d="M10 2a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 2zM10 15a.75.75 0 01.75.75v1.5a.75.75 0 01-1.5 0v-1.5A.75.75 0 0110 15zM10 7a3 3 0 100 6 3 3 0 000-6zM15.657 5.404a.75.75 0 10-1.06-1.06l-1.061 1.06a.75.75 0 001.06 1.06l1.06-1.06zM6.464 14.596a.75.75 0 10-1.06-1.06l-1.06 1.06a.75.75 0 001.06 1.06l1.06-1.06zM18 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 0118 10zM5 10a.75.75 0 01-.75.75h-1.5a.75.75 0 010-1.5h1.5A.75.75 0 015 10zM14.596 15.657a.75.75 0 001.06-1.06l-1.06-1.061a.75.75 0 10-1.06 1.06l1.06 1.06zM5.404 6.464a.75.75 0 001.06-1.06l-1.06-1.06a.75.75 0 10-1.061 1.06l1.06 1.06z"
|
||||
/>
|
||||
</svg> -->
|
||||
<span class="ml-2 self-center"> {$i18n.t('JSON')} </span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{#if requestFormat !== null}
|
||||
<div class="flex mt-1 space-x-2">
|
||||
<Textarea
|
||||
className="w-full text-sm dark:text-gray-300 dark:bg-gray-900 outline-hidden"
|
||||
placeholder={$i18n.t('e.g. "json" or a JSON schema')}
|
||||
bind:value={requestFormat}
|
||||
/>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
|
|
|
|||
|
|
@ -17,6 +17,7 @@
|
|||
|
||||
// Addons
|
||||
let titleAutoGenerate = true;
|
||||
let autoFollowUps = true;
|
||||
let autoTags = true;
|
||||
|
||||
let responseAutoCopy = false;
|
||||
|
|
@ -197,6 +198,11 @@
|
|||
});
|
||||
};
|
||||
|
||||
const toggleAutoFollowUps = async () => {
|
||||
autoFollowUps = !autoFollowUps;
|
||||
saveSettings({ autoFollowUps });
|
||||
};
|
||||
|
||||
const toggleAutoTags = async () => {
|
||||
autoTags = !autoTags;
|
||||
saveSettings({ autoTags });
|
||||
|
|
@ -286,35 +292,36 @@
|
|||
|
||||
onMount(async () => {
|
||||
titleAutoGenerate = $settings?.title?.auto ?? true;
|
||||
autoTags = $settings.autoTags ?? true;
|
||||
autoTags = $settings?.autoTags ?? true;
|
||||
autoFollowUps = $settings?.autoFollowUps ?? true;
|
||||
|
||||
highContrastMode = $settings.highContrastMode ?? false;
|
||||
highContrastMode = $settings?.highContrastMode ?? false;
|
||||
|
||||
detectArtifacts = $settings.detectArtifacts ?? true;
|
||||
responseAutoCopy = $settings.responseAutoCopy ?? false;
|
||||
detectArtifacts = $settings?.detectArtifacts ?? true;
|
||||
responseAutoCopy = $settings?.responseAutoCopy ?? false;
|
||||
|
||||
showUsername = $settings.showUsername ?? false;
|
||||
showUpdateToast = $settings.showUpdateToast ?? true;
|
||||
showChangelog = $settings.showChangelog ?? true;
|
||||
showUsername = $settings?.showUsername ?? false;
|
||||
showUpdateToast = $settings?.showUpdateToast ?? true;
|
||||
showChangelog = $settings?.showChangelog ?? true;
|
||||
|
||||
showEmojiInCall = $settings.showEmojiInCall ?? false;
|
||||
voiceInterruption = $settings.voiceInterruption ?? false;
|
||||
showEmojiInCall = $settings?.showEmojiInCall ?? false;
|
||||
voiceInterruption = $settings?.voiceInterruption ?? false;
|
||||
|
||||
richTextInput = $settings.richTextInput ?? true;
|
||||
promptAutocomplete = $settings.promptAutocomplete ?? false;
|
||||
largeTextAsFile = $settings.largeTextAsFile ?? false;
|
||||
copyFormatted = $settings.copyFormatted ?? false;
|
||||
richTextInput = $settings?.richTextInput ?? true;
|
||||
promptAutocomplete = $settings?.promptAutocomplete ?? false;
|
||||
largeTextAsFile = $settings?.largeTextAsFile ?? false;
|
||||
copyFormatted = $settings?.copyFormatted ?? false;
|
||||
|
||||
collapseCodeBlocks = $settings.collapseCodeBlocks ?? false;
|
||||
expandDetails = $settings.expandDetails ?? false;
|
||||
collapseCodeBlocks = $settings?.collapseCodeBlocks ?? false;
|
||||
expandDetails = $settings?.expandDetails ?? false;
|
||||
|
||||
landingPageMode = $settings.landingPageMode ?? '';
|
||||
chatBubble = $settings.chatBubble ?? true;
|
||||
widescreenMode = $settings.widescreenMode ?? false;
|
||||
splitLargeChunks = $settings.splitLargeChunks ?? false;
|
||||
scrollOnBranchChange = $settings.scrollOnBranchChange ?? true;
|
||||
chatDirection = $settings.chatDirection ?? 'auto';
|
||||
userLocation = $settings.userLocation ?? false;
|
||||
landingPageMode = $settings?.landingPageMode ?? '';
|
||||
chatBubble = $settings?.chatBubble ?? true;
|
||||
widescreenMode = $settings?.widescreenMode ?? false;
|
||||
splitLargeChunks = $settings?.splitLargeChunks ?? false;
|
||||
scrollOnBranchChange = $settings?.scrollOnBranchChange ?? true;
|
||||
chatDirection = $settings?.chatDirection ?? 'auto';
|
||||
userLocation = $settings?.userLocation ?? false;
|
||||
|
||||
notificationSound = $settings?.notificationSound ?? true;
|
||||
notificationSoundAlways = $settings?.notificationSoundAlways ?? false;
|
||||
|
|
@ -324,19 +331,19 @@
|
|||
|
||||
stylizedPdfExport = $settings?.stylizedPdfExport ?? true;
|
||||
|
||||
hapticFeedback = $settings.hapticFeedback ?? false;
|
||||
ctrlEnterToSend = $settings.ctrlEnterToSend ?? false;
|
||||
hapticFeedback = $settings?.hapticFeedback ?? false;
|
||||
ctrlEnterToSend = $settings?.ctrlEnterToSend ?? false;
|
||||
|
||||
imageCompression = $settings.imageCompression ?? false;
|
||||
imageCompressionSize = $settings.imageCompressionSize ?? { width: '', height: '' };
|
||||
imageCompression = $settings?.imageCompression ?? false;
|
||||
imageCompressionSize = $settings?.imageCompressionSize ?? { width: '', height: '' };
|
||||
|
||||
defaultModelId = $settings?.models?.at(0) ?? '';
|
||||
if ($config?.default_models) {
|
||||
defaultModelId = $config.default_models.split(',')[0];
|
||||
}
|
||||
|
||||
backgroundImageUrl = $settings.backgroundImageUrl ?? null;
|
||||
webSearch = $settings.webSearch ?? null;
|
||||
backgroundImageUrl = $settings?.backgroundImageUrl ?? null;
|
||||
webSearch = $settings?.webSearch ?? null;
|
||||
});
|
||||
</script>
|
||||
|
||||
|
|
@ -619,6 +626,26 @@
|
|||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div class=" py-0.5 flex w-full justify-between">
|
||||
<div class=" self-center text-xs">{$i18n.t('Follow-Up Auto-Generation')}</div>
|
||||
|
||||
<button
|
||||
class="p-1 px-3 text-xs flex rounded-sm transition"
|
||||
on:click={() => {
|
||||
toggleAutoFollowUps();
|
||||
}}
|
||||
type="button"
|
||||
>
|
||||
{#if autoFollowUps === true}
|
||||
<span class="ml-2 self-center">{$i18n.t('On')}</span>
|
||||
{:else}
|
||||
<span class="ml-2 self-center">{$i18n.t('Off')}</span>
|
||||
{/if}
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div class=" py-0.5 flex w-full justify-between">
|
||||
<div class=" self-center text-xs">{$i18n.t('Chat Tags Auto-Generation')}</div>
|
||||
|
|
|
|||
|
|
@ -34,93 +34,160 @@
|
|||
id: 'general',
|
||||
title: 'General',
|
||||
keywords: [
|
||||
'general',
|
||||
'theme',
|
||||
'language',
|
||||
'notifications',
|
||||
'system',
|
||||
'systemprompt',
|
||||
'prompt',
|
||||
'advanced',
|
||||
'settings',
|
||||
'defaultsettings',
|
||||
'advancedparams',
|
||||
'advancedparameters',
|
||||
'advanced params',
|
||||
'advanced parameters',
|
||||
'configuration',
|
||||
'systemsettings',
|
||||
'notificationsettings',
|
||||
'systempromptconfig',
|
||||
'languageoptions',
|
||||
'defaultparameters',
|
||||
'systemparameters'
|
||||
'default parameters',
|
||||
'defaultsettings',
|
||||
'default settings',
|
||||
'general',
|
||||
'keepalive',
|
||||
'keep alive',
|
||||
'languages',
|
||||
'notifications',
|
||||
'requestmode',
|
||||
'request mode',
|
||||
'systemparameters',
|
||||
'system parameters',
|
||||
'systemprompt',
|
||||
'system prompt',
|
||||
'systemsettings',
|
||||
'system settings',
|
||||
'theme',
|
||||
'translate',
|
||||
'webuisettings',
|
||||
'webui settings'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'interface',
|
||||
title: 'Interface',
|
||||
keywords: [
|
||||
'defaultmodel',
|
||||
'selectmodel',
|
||||
'ui',
|
||||
'userinterface',
|
||||
'display',
|
||||
'layout',
|
||||
'design',
|
||||
'landingpage',
|
||||
'landingpagemode',
|
||||
'default',
|
||||
'chat',
|
||||
'chatbubble',
|
||||
'chatui',
|
||||
'username',
|
||||
'showusername',
|
||||
'displayusername',
|
||||
'widescreen',
|
||||
'widescreenmode',
|
||||
'fullscreen',
|
||||
'expandmode',
|
||||
'allow user location',
|
||||
'allow voice interruption in call',
|
||||
'allowuserlocation',
|
||||
'allowvoiceinterruptionincall',
|
||||
'always collapse codeblocks',
|
||||
'always collapse code blocks',
|
||||
'always expand details',
|
||||
'always on web search',
|
||||
'always play notification sound',
|
||||
'alwayscollapsecodeblocks',
|
||||
'alwaysexpanddetails',
|
||||
'alwaysonwebsearch',
|
||||
'alwaysplaynotificationsound',
|
||||
'android',
|
||||
'auto chat tags',
|
||||
'auto copy response to clipboard',
|
||||
'auto title',
|
||||
'autochattags',
|
||||
'autocopyresponsetoclipboard',
|
||||
'autotitle',
|
||||
'beta',
|
||||
'call',
|
||||
'chat background image',
|
||||
'chat bubble ui',
|
||||
'chat direction',
|
||||
'chat tags autogen',
|
||||
'chat tags autogeneration',
|
||||
'chat ui',
|
||||
'chatbackgroundimage',
|
||||
'chatbubbleui',
|
||||
'chatdirection',
|
||||
'chat tags autogeneration',
|
||||
'chattagsautogeneration',
|
||||
'chatui',
|
||||
'copy formatted text',
|
||||
'copyformattedtext',
|
||||
'default model',
|
||||
'defaultmodel',
|
||||
'design',
|
||||
'detect artifacts automatically',
|
||||
'detectartifactsautomatically',
|
||||
'display emoji in call',
|
||||
'display username',
|
||||
'displayemojiincall',
|
||||
'displayusername',
|
||||
'enter key behavior',
|
||||
'enterkeybehavior',
|
||||
'expand mode',
|
||||
'expandmode',
|
||||
'file',
|
||||
'followup autogeneration',
|
||||
'followupautogeneration',
|
||||
'fullscreen',
|
||||
'fullwidthmode',
|
||||
'full width mode',
|
||||
'haptic feedback',
|
||||
'hapticfeedback',
|
||||
'high contrast mode',
|
||||
'highcontrastmode',
|
||||
'iframe sandbox allow forms',
|
||||
'iframe sandbox allow same origin',
|
||||
'iframesandboxallowforms',
|
||||
'iframesandboxallowsameorigin',
|
||||
'imagecompression',
|
||||
'image compression',
|
||||
'imagemaxcompressionsize',
|
||||
'image max compression size',
|
||||
'interface customization',
|
||||
'interface options',
|
||||
'interfacecustomization',
|
||||
'interfaceoptions',
|
||||
'landing page mode',
|
||||
'landingpagemode',
|
||||
'layout',
|
||||
'left to right',
|
||||
'left-to-right',
|
||||
'lefttoright',
|
||||
'ltr',
|
||||
'paste large text as file',
|
||||
'pastelargetextasfile',
|
||||
'reset background',
|
||||
'resetbackground',
|
||||
'response auto copy',
|
||||
'responseautocopy',
|
||||
'rich text input for chat',
|
||||
'richtextinputforchat',
|
||||
'right to left',
|
||||
'right-to-left',
|
||||
'righttoleft',
|
||||
'rtl',
|
||||
'notifications',
|
||||
'toast',
|
||||
'toastnotifications',
|
||||
'largechunks',
|
||||
'streamlargechunks',
|
||||
'scroll',
|
||||
'scrollonbranchchange',
|
||||
'scroll behavior',
|
||||
'scroll on branch change',
|
||||
'scrollbehavior',
|
||||
'richtext',
|
||||
'richtextinput',
|
||||
'background',
|
||||
'chatbackground',
|
||||
'chatbackgroundimage',
|
||||
'backgroundimage',
|
||||
'uploadbackground',
|
||||
'resetbackground',
|
||||
'titleautogen',
|
||||
'scrollonbranchchange',
|
||||
'select model',
|
||||
'selectmodel',
|
||||
'settings',
|
||||
'show username',
|
||||
'showusername',
|
||||
'stream large chunks',
|
||||
'streamlargechunks',
|
||||
'stylized pdf export',
|
||||
'stylizedpdfexport',
|
||||
'title autogeneration',
|
||||
'titleautogeneration',
|
||||
'autotitle',
|
||||
'chattags',
|
||||
'autochattags',
|
||||
'responseautocopy',
|
||||
'clipboard',
|
||||
'location',
|
||||
'userlocation',
|
||||
'toast notifications for new updates',
|
||||
'toastnotificationsfornewupdates',
|
||||
'upload background',
|
||||
'uploadbackground',
|
||||
'user interface',
|
||||
'user location access',
|
||||
'userinterface',
|
||||
'userlocationaccess',
|
||||
'haptic',
|
||||
'hapticfeedback',
|
||||
'vibration',
|
||||
'voice',
|
||||
'voice control',
|
||||
'voicecontrol',
|
||||
'voiceinterruption',
|
||||
'call',
|
||||
'emojis',
|
||||
'displayemoji',
|
||||
'save',
|
||||
'interfaceoptions',
|
||||
'interfacecustomization',
|
||||
'alwaysonwebsearch'
|
||||
'widescreen mode',
|
||||
'widescreenmode',
|
||||
'whatsnew',
|
||||
'whats new',
|
||||
'websearchinchat',
|
||||
'web search in chat'
|
||||
]
|
||||
},
|
||||
...($user?.role === 'admin' ||
|
||||
|
|
@ -129,7 +196,15 @@
|
|||
{
|
||||
id: 'connections',
|
||||
title: 'Connections',
|
||||
keywords: []
|
||||
keywords: [
|
||||
'addconnection',
|
||||
'add connection',
|
||||
'manageconnections',
|
||||
'manage connections',
|
||||
'manage direct connections',
|
||||
'managedirectconnections',
|
||||
'settings'
|
||||
]
|
||||
}
|
||||
]
|
||||
: []),
|
||||
|
|
@ -140,7 +215,15 @@
|
|||
{
|
||||
id: 'tools',
|
||||
title: 'Tools',
|
||||
keywords: []
|
||||
keywords: [
|
||||
'addconnection',
|
||||
'add connection',
|
||||
'managetools',
|
||||
'manage tools',
|
||||
'manage tool servers',
|
||||
'managetoolservers',
|
||||
'settings'
|
||||
]
|
||||
}
|
||||
]
|
||||
: []),
|
||||
|
|
@ -149,159 +232,233 @@
|
|||
id: 'personalization',
|
||||
title: 'Personalization',
|
||||
keywords: [
|
||||
'personalization',
|
||||
'memory',
|
||||
'personalize',
|
||||
'preferences',
|
||||
'profile',
|
||||
'personalsettings',
|
||||
'account preferences',
|
||||
'account settings',
|
||||
'accountpreferences',
|
||||
'accountsettings',
|
||||
'custom settings',
|
||||
'customsettings',
|
||||
'userpreferences',
|
||||
'accountpreferences'
|
||||
'experimental',
|
||||
'memories',
|
||||
'memory',
|
||||
'personalization',
|
||||
'personalize',
|
||||
'personal settings',
|
||||
'personalsettings',
|
||||
'profile',
|
||||
'user preferences',
|
||||
'userpreferences'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'audio',
|
||||
title: 'Audio',
|
||||
keywords: [
|
||||
'audio',
|
||||
'sound',
|
||||
'soundsettings',
|
||||
'audiocontrol',
|
||||
'volume',
|
||||
'speech',
|
||||
'speechrecognition',
|
||||
'stt',
|
||||
'speechtotext',
|
||||
'tts',
|
||||
'texttospeech',
|
||||
'playback',
|
||||
'playbackspeed',
|
||||
'voiceplayback',
|
||||
'speechplayback',
|
||||
'audiooutput',
|
||||
'speechengine',
|
||||
'voicecontrol',
|
||||
'audioplayback',
|
||||
'transcription',
|
||||
'autotranscribe',
|
||||
'autosend',
|
||||
'speechsettings',
|
||||
'audiovoice',
|
||||
'voiceoptions',
|
||||
'setvoice',
|
||||
'nonlocalvoices',
|
||||
'savesettings',
|
||||
'audio config',
|
||||
'audio control',
|
||||
'audio features',
|
||||
'audio input',
|
||||
'audio output',
|
||||
'audio playback',
|
||||
'audio voice',
|
||||
'audioconfig',
|
||||
'speechconfig',
|
||||
'voicerecognition',
|
||||
'speechsynthesis',
|
||||
'speechmode',
|
||||
'voicespeed',
|
||||
'speechrate',
|
||||
'speechspeed',
|
||||
'audioinput',
|
||||
'audiocontrol',
|
||||
'audiofeatures',
|
||||
'voicemodes'
|
||||
'audioinput',
|
||||
'audiooutput',
|
||||
'audioplayback',
|
||||
'audiovoice',
|
||||
'auto playback response',
|
||||
'autoplaybackresponse',
|
||||
'auto transcribe',
|
||||
'autotranscribe',
|
||||
'instant auto send after voice transcription',
|
||||
'instantautosendaftervoicetranscription',
|
||||
'language',
|
||||
'non local voices',
|
||||
'nonlocalvoices',
|
||||
'save settings',
|
||||
'savesettings',
|
||||
'set voice',
|
||||
'setvoice',
|
||||
'sound settings',
|
||||
'soundsettings',
|
||||
'speech config',
|
||||
'speech mode',
|
||||
'speech playback speed',
|
||||
'speech rate',
|
||||
'speech recognition',
|
||||
'speech settings',
|
||||
'speech speed',
|
||||
'speech synthesis',
|
||||
'speech to text engine',
|
||||
'speechconfig',
|
||||
'speechmode',
|
||||
'speechplaybackspeed',
|
||||
'speechrate',
|
||||
'speechrecognition',
|
||||
'speechsettings',
|
||||
'speechspeed',
|
||||
'speechsynthesis',
|
||||
'speechtotextengine',
|
||||
'speedch playback rate',
|
||||
'speedchplaybackrate',
|
||||
'stt settings',
|
||||
'sttsettings',
|
||||
'text to speech engine',
|
||||
'text to speech',
|
||||
'textospeechengine',
|
||||
'texttospeech',
|
||||
'texttospeechvoice',
|
||||
'text to speech voice',
|
||||
'voice control',
|
||||
'voice modes',
|
||||
'voice options',
|
||||
'voice playback',
|
||||
'voice recognition',
|
||||
'voice speed',
|
||||
'voicecontrol',
|
||||
'voicemodes',
|
||||
'voiceoptions',
|
||||
'voiceplayback',
|
||||
'voicerecognition',
|
||||
'voicespeed',
|
||||
'volume'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'chats',
|
||||
title: 'Chats',
|
||||
keywords: [
|
||||
'chat',
|
||||
'messages',
|
||||
'conversations',
|
||||
'chatsettings',
|
||||
'history',
|
||||
'archive all chats',
|
||||
'archive chats',
|
||||
'archiveallchats',
|
||||
'archivechats',
|
||||
'archived chats',
|
||||
'archivedchats',
|
||||
'chat activity',
|
||||
'chat history',
|
||||
'chat settings',
|
||||
'chatactivity',
|
||||
'chathistory',
|
||||
'messagehistory',
|
||||
'messagearchive',
|
||||
'convo',
|
||||
'chats',
|
||||
'chatsettings',
|
||||
'conversation activity',
|
||||
'conversation history',
|
||||
'conversationactivity',
|
||||
'conversationhistory',
|
||||
'exportmessages',
|
||||
'chatactivity'
|
||||
'conversations',
|
||||
'convos',
|
||||
'delete all chats',
|
||||
'delete chats',
|
||||
'deleteallchats',
|
||||
'deletechats',
|
||||
'export chats',
|
||||
'exportchats',
|
||||
'import chats',
|
||||
'importchats',
|
||||
'message activity',
|
||||
'message archive',
|
||||
'message history',
|
||||
'messagearchive',
|
||||
'messagehistory'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'account',
|
||||
title: 'Account',
|
||||
keywords: [
|
||||
'account',
|
||||
'profile',
|
||||
'security',
|
||||
'privacy',
|
||||
'settings',
|
||||
'account preferences',
|
||||
'account settings',
|
||||
'accountpreferences',
|
||||
'accountsettings',
|
||||
'api keys',
|
||||
'apikeys',
|
||||
'change password',
|
||||
'changepassword',
|
||||
'jwt token',
|
||||
'jwttoken',
|
||||
'login',
|
||||
'new password',
|
||||
'newpassword',
|
||||
'notification webhook url',
|
||||
'notificationwebhookurl',
|
||||
'personal settings',
|
||||
'personalsettings',
|
||||
'privacy settings',
|
||||
'privacysettings',
|
||||
'profileavatar',
|
||||
'profile avatar',
|
||||
'profile details',
|
||||
'profile image',
|
||||
'profile picture',
|
||||
'profiledetails',
|
||||
'profileimage',
|
||||
'profilepicture',
|
||||
'security settings',
|
||||
'securitysettings',
|
||||
'update account',
|
||||
'update password',
|
||||
'updateaccount',
|
||||
'updatepassword',
|
||||
'user account',
|
||||
'user data',
|
||||
'user preferences',
|
||||
'user profile',
|
||||
'useraccount',
|
||||
'userdata',
|
||||
'api',
|
||||
'apikey',
|
||||
'username',
|
||||
'userpreferences',
|
||||
'userprofile',
|
||||
'profiledetails',
|
||||
'accountsettings',
|
||||
'accountpreferences',
|
||||
'securitysettings',
|
||||
'privacysettings'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'admin',
|
||||
title: 'Admin',
|
||||
keywords: [
|
||||
'admin',
|
||||
'administrator',
|
||||
'adminsettings',
|
||||
'adminpanel',
|
||||
'systemadmin',
|
||||
'administratoraccess',
|
||||
'systemcontrol',
|
||||
'manage',
|
||||
'management',
|
||||
'admincontrols',
|
||||
'adminfeatures',
|
||||
'usercontrol',
|
||||
'arenamodel',
|
||||
'evaluations',
|
||||
'websearch',
|
||||
'database',
|
||||
'pipelines',
|
||||
'images',
|
||||
'audio',
|
||||
'documents',
|
||||
'rag',
|
||||
'models',
|
||||
'ollama',
|
||||
'openai',
|
||||
'users'
|
||||
'webhook url',
|
||||
'webhookurl'
|
||||
]
|
||||
},
|
||||
{
|
||||
id: 'about',
|
||||
title: 'About',
|
||||
keywords: [
|
||||
'about',
|
||||
'info',
|
||||
'information',
|
||||
'version',
|
||||
'documentation',
|
||||
'help',
|
||||
'support',
|
||||
'details',
|
||||
'aboutus',
|
||||
'softwareinfo',
|
||||
'timothyjaeryangbaek',
|
||||
'openwebui',
|
||||
'release',
|
||||
'updates',
|
||||
'updateinfo',
|
||||
'versioninfo',
|
||||
'about app',
|
||||
'about me',
|
||||
'about open webui',
|
||||
'about page',
|
||||
'about us',
|
||||
'aboutapp',
|
||||
'terms',
|
||||
'termsandconditions',
|
||||
'aboutme',
|
||||
'aboutopenwebui',
|
||||
'aboutpage',
|
||||
'aboutus',
|
||||
'check for updates',
|
||||
'checkforupdates',
|
||||
'contact',
|
||||
'aboutpage'
|
||||
'copyright',
|
||||
'details',
|
||||
'discord',
|
||||
'documentation',
|
||||
'github',
|
||||
'help',
|
||||
'information',
|
||||
'license',
|
||||
'redistributions',
|
||||
'release',
|
||||
'see whats new',
|
||||
'seewhatsnew',
|
||||
'settings',
|
||||
'software info',
|
||||
'softwareinfo',
|
||||
'support',
|
||||
'terms and conditions',
|
||||
'terms of use',
|
||||
'termsandconditions',
|
||||
'termsofuse',
|
||||
'timothy jae ryang baek',
|
||||
'timothy j baek',
|
||||
'timothyjaeryangbaek',
|
||||
'timothyjbaek',
|
||||
'twitter',
|
||||
'update info',
|
||||
'updateinfo',
|
||||
'version info',
|
||||
'versioninfo'
|
||||
]
|
||||
}
|
||||
];
|
||||
|
|
@ -405,15 +562,21 @@
|
|||
<div class="flex flex-col md:flex-row w-full px-4 pt-1 pb-4 md:space-x-4">
|
||||
<div
|
||||
id="settings-tabs-container"
|
||||
class="tabs flex flex-row overflow-x-auto gap-2.5 md:gap-1 md:flex-col flex-1 md:flex-none md:w-40 dark:text-gray-200 text-sm font-medium text-left mb-1 md:mb-0 -translate-y-1"
|
||||
class="tabs flex flex-row overflow-x-auto gap-2.5 md:gap-1 md:flex-col flex-1 md:flex-none md:w-40 md:min-h-[32rem] md:max-h-[32rem] dark:text-gray-200 text-sm font-medium text-left mb-1 md:mb-0 -translate-y-1"
|
||||
>
|
||||
<div class="hidden md:flex w-full rounded-xl -mb-1 px-0.5 gap-2" id="settings-search">
|
||||
<div class="self-center rounded-l-xl bg-transparent">
|
||||
<Search className="size-3.5" />
|
||||
<Search
|
||||
className="size-3.5"
|
||||
strokeWidth={($settings?.highContrastMode ?? false) ? '3' : '1.5'}
|
||||
/>
|
||||
</div>
|
||||
<label class="sr-only" for="search-input-settings-modal">{$i18n.t('Search')}</label>
|
||||
<input
|
||||
class="w-full py-1.5 text-sm bg-transparent dark:text-gray-300 outline-hidden"
|
||||
class={`w-full py-1.5 text-sm bg-transparent dark:text-gray-300 outline-hidden
|
||||
${($settings?.highContrastMode ?? false) ? 'placeholder-gray-800' : ''}`}
|
||||
bind:value={search}
|
||||
id="search-input-settings-modal"
|
||||
on:input={searchDebounceHandler}
|
||||
placeholder={$i18n.t('Search')}
|
||||
/>
|
||||
|
|
@ -423,10 +586,16 @@
|
|||
{#each visibleTabs as tabId (tabId)}
|
||||
{#if tabId === 'general'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'general'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'general'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'general';
|
||||
}}
|
||||
|
|
@ -449,10 +618,16 @@
|
|||
</button>
|
||||
{:else if tabId === 'interface'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'interface'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'interface'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'interface';
|
||||
}}
|
||||
|
|
@ -476,10 +651,16 @@
|
|||
{:else if tabId === 'connections'}
|
||||
{#if $user?.role === 'admin' || ($user?.role === 'user' && $config?.features?.enable_direct_connections)}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'connections'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'connections'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'connections';
|
||||
}}
|
||||
|
|
@ -502,10 +683,16 @@
|
|||
{:else if tabId === 'tools'}
|
||||
{#if $user?.role === 'admin' || ($user?.role === 'user' && $user?.permissions?.features?.direct_tool_servers)}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'tools'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'tools'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'tools';
|
||||
}}
|
||||
|
|
@ -529,10 +716,16 @@
|
|||
{/if}
|
||||
{:else if tabId === 'personalization'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'personalization'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'personalization'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'personalization';
|
||||
}}
|
||||
|
|
@ -544,10 +737,16 @@
|
|||
</button>
|
||||
{:else if tabId === 'audio'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'audio'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'audio'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'audio';
|
||||
}}
|
||||
|
|
@ -571,10 +770,16 @@
|
|||
</button>
|
||||
{:else if tabId === 'chats'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'chats'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'chats'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'chats';
|
||||
}}
|
||||
|
|
@ -597,10 +802,16 @@
|
|||
</button>
|
||||
{:else if tabId === 'account'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'account'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'account'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'account';
|
||||
}}
|
||||
|
|
@ -623,10 +834,16 @@
|
|||
</button>
|
||||
{:else if tabId === 'about'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'about'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
class={`px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition
|
||||
${
|
||||
selectedTab === 'about'
|
||||
? ($settings?.highContrastMode ?? false)
|
||||
? 'dark:bg-gray-800 bg-gray-200'
|
||||
: ''
|
||||
: ($settings?.highContrastMode ?? false)
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'
|
||||
}`}
|
||||
on:click={() => {
|
||||
selectedTab = 'about';
|
||||
}}
|
||||
|
|
@ -647,35 +864,6 @@
|
|||
</div>
|
||||
<div class=" self-center">{$i18n.t('About')}</div>
|
||||
</button>
|
||||
{:else if tabId === 'admin'}
|
||||
{#if $user?.role === 'admin'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none flex text-left transition {selectedTab ===
|
||||
'admin'
|
||||
? ''
|
||||
: ' text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={async () => {
|
||||
await goto('/admin/settings');
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
class="size-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M4.5 3.75a3 3 0 0 0-3 3v10.5a3 3 0 0 0 3 3h15a3 3 0 0 0 3-3V6.75a3 3 0 0 0-3-3h-15Zm4.125 3a2.25 2.25 0 1 0 0 4.5 2.25 2.25 0 0 0 0-4.5Zm-3.873 8.703a4.126 4.126 0 0 1 7.746 0 .75.75 0 0 1-.351.92 7.47 7.47 0 0 1-3.522.877 7.47 7.47 0 0 1-3.522-.877.75.75 0 0 1-.351-.92ZM15 8.25a.75.75 0 0 0 0 1.5h3.75a.75.75 0 0 0 0-1.5H15ZM14.25 12a.75.75 0 0 1 .75-.75h3.75a.75.75 0 0 1 0 1.5H15a.75.75 0 0 1-.75-.75Zm.75 2.25a.75.75 0 0 0 0 1.5h3.75a.75.75 0 0 0 0-1.5H15Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class=" self-center">{$i18n.t('Admin Settings')}</div>
|
||||
</button>
|
||||
{/if}
|
||||
{/if}
|
||||
{/each}
|
||||
{:else}
|
||||
|
|
@ -683,6 +871,34 @@
|
|||
{$i18n.t('No results found')}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if $user?.role === 'admin'}
|
||||
<button
|
||||
class="px-0.5 py-1 min-w-fit rounded-lg flex-1 md:flex-none md:mt-auto flex text-left transition {$settings?.highContrastMode
|
||||
? 'hover:bg-gray-200 dark:hover:bg-gray-800'
|
||||
: 'text-gray-300 dark:text-gray-600 hover:text-gray-700 dark:hover:text-white'}"
|
||||
on:click={async () => {
|
||||
await goto('/admin/settings');
|
||||
show = false;
|
||||
}}
|
||||
>
|
||||
<div class=" self-center mr-2">
|
||||
<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
viewBox="0 0 24 24"
|
||||
fill="currentColor"
|
||||
class="size-4"
|
||||
>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M4.5 3.75a3 3 0 0 0-3 3v10.5a3 3 0 0 0 3 3h15a3 3 0 0 0 3-3V6.75a3 3 0 0 0-3-3h-15Zm4.125 3a2.25 2.25 0 1 0 0 4.5 2.25 2.25 0 0 0 0-4.5Zm-3.873 8.703a4.126 4.126 0 0 1 7.746 0 .75.75 0 0 1-.351.92 7.47 7.47 0 0 1-3.522.877 7.47 7.47 0 0 1-3.522-.877.75.75 0 0 1-.351-.92ZM15 8.25a.75.75 0 0 0 0 1.5h3.75a.75.75 0 0 0 0-1.5H15ZM14.25 12a.75.75 0 0 1 .75-.75h3.75a.75.75 0 0 1 0 1.5H15a.75.75 0 0 1-.75-.75Zm.75 2.25a.75.75 0 0 0 0 1.5h3.75a.75.75 0 0 0 0-1.5H15Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
</div>
|
||||
<div class=" self-center">{$i18n.t('Admin Settings')}</div>
|
||||
</button>
|
||||
{/if}
|
||||
</div>
|
||||
<div class="flex-1 md:min-h-[32rem] max-h-[32rem]">
|
||||
{#if selectedTab === 'general'}
|
||||
|
|
@ -763,6 +979,7 @@
|
|||
}
|
||||
|
||||
input[type='number'] {
|
||||
appearance: textfield;
|
||||
-moz-appearance: textfield; /* Firefox */
|
||||
}
|
||||
</style>
|
||||
|
|
|
|||
|
|
@ -11,10 +11,13 @@
|
|||
|
||||
import { oneDark } from '@codemirror/theme-one-dark';
|
||||
|
||||
import { onMount, createEventDispatcher, getContext, tick } from 'svelte';
|
||||
import { onMount, createEventDispatcher, getContext, tick, onDestroy } from 'svelte';
|
||||
|
||||
import PyodideWorker from '$lib/workers/pyodide.worker?worker';
|
||||
|
||||
import { formatPythonCode } from '$lib/apis/utils';
|
||||
import { toast } from 'svelte-sonner';
|
||||
import { user } from '$lib/stores';
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
const i18n = getContext('i18n');
|
||||
|
|
@ -113,13 +116,82 @@
|
|||
return await language?.load();
|
||||
};
|
||||
|
||||
let pyodideWorkerInstance = null;
|
||||
|
||||
const getPyodideWorker = () => {
|
||||
if (!pyodideWorkerInstance) {
|
||||
pyodideWorkerInstance = new PyodideWorker(); // Your worker constructor
|
||||
}
|
||||
return pyodideWorkerInstance;
|
||||
};
|
||||
|
||||
// Generate unique IDs for requests
|
||||
let _formatReqId = 0;
|
||||
|
||||
const formatPythonCodePyodide = (code) => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const id = `format-${++_formatReqId}`;
|
||||
let timeout;
|
||||
const worker = getPyodideWorker();
|
||||
|
||||
const script = `
|
||||
import black
|
||||
print(black.format_str("""${code.replace(/\\/g, '\\\\').replace(/`/g, '\\`').replace(/"/g, '\\"')}""", mode=black.Mode()))
|
||||
`;
|
||||
|
||||
const packages = ['black'];
|
||||
|
||||
function handleMessage(event) {
|
||||
const { id: eventId, stdout, stderr } = event.data;
|
||||
if (eventId !== id) return; // Only handle our message
|
||||
clearTimeout(timeout);
|
||||
worker.removeEventListener('message', handleMessage);
|
||||
worker.removeEventListener('error', handleError);
|
||||
|
||||
if (stderr) {
|
||||
reject(stderr);
|
||||
} else {
|
||||
const formatted = stdout && typeof stdout === 'string' ? stdout.trim() : '';
|
||||
resolve({ code: formatted });
|
||||
}
|
||||
}
|
||||
|
||||
function handleError(event) {
|
||||
clearTimeout(timeout);
|
||||
worker.removeEventListener('message', handleMessage);
|
||||
worker.removeEventListener('error', handleError);
|
||||
reject(event.message || 'Pyodide worker error');
|
||||
}
|
||||
|
||||
worker.addEventListener('message', handleMessage);
|
||||
worker.addEventListener('error', handleError);
|
||||
|
||||
// Send to worker
|
||||
worker.postMessage({ id, code: script, packages });
|
||||
|
||||
// Timeout
|
||||
timeout = setTimeout(() => {
|
||||
worker.removeEventListener('message', handleMessage);
|
||||
worker.removeEventListener('error', handleError);
|
||||
try {
|
||||
worker.terminate();
|
||||
} catch {}
|
||||
pyodideWorkerInstance = null;
|
||||
reject('Execution Time Limit Exceeded');
|
||||
}, 60000);
|
||||
});
|
||||
};
|
||||
|
||||
export const formatPythonCodeHandler = async () => {
|
||||
if (codeEditor) {
|
||||
const res = await formatPythonCode(localStorage.token, _value).catch((error) => {
|
||||
const res = await (
|
||||
$user?.role === 'admin'
|
||||
? formatPythonCode(localStorage.token, _value)
|
||||
: formatPythonCodePyodide(_value)
|
||||
).catch((error) => {
|
||||
toast.error(`${error}`);
|
||||
return null;
|
||||
});
|
||||
|
||||
if (res && res.code) {
|
||||
const formattedCode = res.code;
|
||||
codeEditor.dispatch({
|
||||
|
|
@ -240,6 +312,12 @@
|
|||
document.removeEventListener('keydown', keydownHandler);
|
||||
};
|
||||
});
|
||||
|
||||
onDestroy(() => {
|
||||
if (pyodideWorkerInstance) {
|
||||
pyodideWorkerInstance.terminate();
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<div id="code-textarea-{id}" class="h-full w-full text-sm" />
|
||||
|
|
|
|||
|
|
@ -13,6 +13,9 @@
|
|||
dayjs.extend(relativeTime);
|
||||
|
||||
async function loadLocale(locales) {
|
||||
if (!locales || !Array.isArray(locales)) {
|
||||
return;
|
||||
}
|
||||
for (const locale of locales) {
|
||||
try {
|
||||
dayjs.locale(locale);
|
||||
|
|
|
|||
|
|
@ -33,7 +33,9 @@
|
|||
});
|
||||
|
||||
onDestroy(() => {
|
||||
observer.disconnect();
|
||||
if (observer) {
|
||||
observer.disconnect();
|
||||
}
|
||||
|
||||
if (intervalId) {
|
||||
clearInterval(intervalId);
|
||||
|
|
|
|||
|
|
@ -2,10 +2,8 @@
|
|||
import DOMPurify from 'dompurify';
|
||||
|
||||
import { onDestroy } from 'svelte';
|
||||
import { marked } from 'marked';
|
||||
|
||||
import tippy from 'tippy.js';
|
||||
import { roundArrow } from 'tippy.js';
|
||||
|
||||
export let placement = 'top';
|
||||
export let content = `I'm a tooltip!`;
|
||||
|
|
@ -47,6 +45,6 @@
|
|||
});
|
||||
</script>
|
||||
|
||||
<div bind:this={tooltipElement} aria-label={DOMPurify.sanitize(content)} class={className}>
|
||||
<div bind:this={tooltipElement} class={className}>
|
||||
<slot />
|
||||
</div>
|
||||
|
|
|
|||
12
src/lib/components/icons/ArrowTurnDownRight.svelte
Normal file
12
src/lib/components/icons/ArrowTurnDownRight.svelte
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
<script lang="ts">
|
||||
export let className = 'size-4';
|
||||
export let strokeWidth = '1.5';
|
||||
</script>
|
||||
|
||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 16 16" fill="currentColor" class={className}>
|
||||
<path
|
||||
fill-rule="evenodd"
|
||||
d="M2.75 2a.75.75 0 0 1 .75.75v6.5h7.94l-.97-.97a.75.75 0 0 1 1.06-1.06l2.25 2.25a.75.75 0 0 1 0 1.06l-2.25 2.25a.75.75 0 1 1-1.06-1.06l.97-.97H2.75A.75.75 0 0 1 2 10V2.75A.75.75 0 0 1 2.75 2Z"
|
||||
clip-rule="evenodd"
|
||||
/>
|
||||
</svg>
|
||||
|
|
@ -21,7 +21,8 @@
|
|||
channels,
|
||||
socket,
|
||||
config,
|
||||
isApp
|
||||
isApp,
|
||||
models
|
||||
} from '$lib/stores';
|
||||
import { onMount, getContext, tick, onDestroy } from 'svelte';
|
||||
|
||||
|
|
@ -649,6 +650,46 @@
|
|||
? 'opacity-20'
|
||||
: ''}"
|
||||
>
|
||||
{#if ($models ?? []).length > 0 && ($settings?.pinnedModels ?? []).length > 0}
|
||||
<div class="mt-0.5">
|
||||
{#each $settings.pinnedModels as modelId (modelId)}
|
||||
{@const model = $models.find((model) => model.id === modelId)}
|
||||
{#if model}
|
||||
<div class="px-1.5 flex justify-center text-gray-800 dark:text-gray-200">
|
||||
<a
|
||||
class="grow flex items-center space-x-2.5 rounded-lg px-2 py-[7px] hover:bg-gray-100 dark:hover:bg-gray-900 transition"
|
||||
href="/?model={modelId}"
|
||||
on:click={() => {
|
||||
selectedChatId = null;
|
||||
chatId.set('');
|
||||
|
||||
if ($mobile) {
|
||||
showSidebar.set(false);
|
||||
}
|
||||
}}
|
||||
draggable="false"
|
||||
>
|
||||
<div class="self-center shrink-0">
|
||||
<img
|
||||
crossorigin="anonymous"
|
||||
src={model?.info?.meta?.profile_image_url ?? '/static/favicon.png'}
|
||||
class=" size-5 rounded-full -translate-x-[0.5px]"
|
||||
alt="logo"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="flex self-center translate-y-[0.5px]">
|
||||
<div class=" self-center font-medium text-sm font-primary line-clamp-1">
|
||||
{model?.name ?? modelId}
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
</div>
|
||||
{/if}
|
||||
{/each}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if $config?.features?.enable_channels && ($user?.role === 'admin' || $channels.length > 0)}
|
||||
<Folder
|
||||
className="px-2 mt-0.5"
|
||||
|
|
@ -785,7 +826,7 @@
|
|||
<div
|
||||
class="ml-3 pl-1 mt-[1px] flex flex-col overflow-y-auto scrollbar-hidden border-s border-gray-100 dark:border-gray-900"
|
||||
>
|
||||
{#each $pinnedChats as chat, idx}
|
||||
{#each $pinnedChats as chat, idx (`pinned-chat-${chat?.id ?? idx}`)}
|
||||
<ChatItem
|
||||
className=""
|
||||
id={chat.id}
|
||||
|
|
@ -831,7 +872,7 @@
|
|||
<div class=" flex-1 flex flex-col overflow-y-auto scrollbar-hidden">
|
||||
<div class="pt-1.5">
|
||||
{#if $chats}
|
||||
{#each $chats as chat, idx}
|
||||
{#each $chats as chat, idx (`chat-${chat?.id ?? idx}`)}
|
||||
{#if idx === 0 || (idx > 0 && chat.time_range !== $chats[idx - 1].time_range)}
|
||||
<div
|
||||
class="w-full pl-2.5 text-xs text-gray-500 dark:text-gray-500 font-medium {idx ===
|
||||
|
|
|
|||
|
|
@ -204,9 +204,10 @@
|
|||
const chatTitleInputKeydownHandler = (e) => {
|
||||
if (e.key === 'Enter') {
|
||||
e.preventDefault();
|
||||
editChatTitle(id, chatTitle);
|
||||
confirmEdit = false;
|
||||
chatTitle = '';
|
||||
setTimeout(() => {
|
||||
const input = document.getElementById(`chat-title-input-${id}`);
|
||||
if (input) input.blur();
|
||||
}, 0);
|
||||
} else if (e.key === 'Escape') {
|
||||
e.preventDefault();
|
||||
confirmEdit = false;
|
||||
|
|
|
|||
|
|
@ -29,7 +29,7 @@
|
|||
|
||||
<div slot="content">
|
||||
<DropdownMenu.Content
|
||||
class="w-full max-w-[160px] rounded-lg px-1 py-1.5 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-lg"
|
||||
class="w-full max-w-[170px] rounded-lg px-1 py-1.5 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-lg"
|
||||
sideOffset={-2}
|
||||
side="bottom"
|
||||
align="start"
|
||||
|
|
|
|||
|
|
@ -13,7 +13,7 @@
|
|||
const i18n = getContext('i18n');
|
||||
|
||||
export let show = false;
|
||||
export let className = 'max-w-[160px]';
|
||||
export let className = 'max-w-[170px]';
|
||||
|
||||
export let onRecord = () => {};
|
||||
export let onCaptureAudio = () => {};
|
||||
|
|
|
|||
|
|
@ -52,7 +52,7 @@
|
|||
|
||||
<div class=" pt-1">
|
||||
<button
|
||||
class=" group-hover:text-gray-500 dark:text-gray-900 dark:hover:text-gray-300 transition"
|
||||
class=" group-hover:text-gray-500 dark:text-gray-500 dark:hover:text-gray-300 transition"
|
||||
on:click={() => {
|
||||
onDelete();
|
||||
}}
|
||||
|
|
|
|||
|
|
@ -49,7 +49,7 @@
|
|||
|
||||
<div slot="content">
|
||||
<DropdownMenu.Content
|
||||
class="w-full max-w-[160px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
class="w-full max-w-[170px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
sideOffset={-2}
|
||||
side="bottom"
|
||||
align="end"
|
||||
|
|
|
|||
|
|
@ -48,7 +48,7 @@
|
|||
|
||||
<div slot="content">
|
||||
<DropdownMenu.Content
|
||||
class="w-full max-w-[160px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
class="w-full max-w-[170px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
sideOffset={-2}
|
||||
side="bottom"
|
||||
align="start"
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@
|
|||
import ChevronRight from '../icons/ChevronRight.svelte';
|
||||
import Spinner from '../common/Spinner.svelte';
|
||||
import Tooltip from '../common/Tooltip.svelte';
|
||||
import { capitalizeFirstLetter } from '$lib/utils';
|
||||
import { capitalizeFirstLetter, slugify } from '$lib/utils';
|
||||
import XMark from '../icons/XMark.svelte';
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
|
|
@ -68,7 +68,15 @@
|
|||
};
|
||||
|
||||
const cloneHandler = async (prompt) => {
|
||||
sessionStorage.prompt = JSON.stringify(prompt);
|
||||
const clonedPrompt = { ...prompt };
|
||||
|
||||
clonedPrompt.title = `${clonedPrompt.title} (Clone)`;
|
||||
const baseCommand = clonedPrompt.command.startsWith('/')
|
||||
? clonedPrompt.command.substring(1)
|
||||
: clonedPrompt.command;
|
||||
clonedPrompt.command = slugify(`${baseCommand} clone`);
|
||||
|
||||
sessionStorage.prompt = JSON.stringify(clonedPrompt);
|
||||
goto('/workspace/prompts/create');
|
||||
};
|
||||
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@
|
|||
export let onSubmit: Function;
|
||||
export let edit = false;
|
||||
export let prompt = null;
|
||||
export let clone = false;
|
||||
|
||||
const i18n = getContext('i18n');
|
||||
|
||||
|
|
|
|||
|
|
@ -39,7 +39,7 @@
|
|||
|
||||
<div slot="content">
|
||||
<DropdownMenu.Content
|
||||
class="w-full max-w-[160px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
class="w-full max-w-[170px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
sideOffset={-2}
|
||||
side="bottom"
|
||||
align="start"
|
||||
|
|
|
|||
|
|
@ -40,7 +40,7 @@
|
|||
|
||||
<div slot="content">
|
||||
<DropdownMenu.Content
|
||||
class="w-full max-w-[160px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
class="w-full max-w-[170px] rounded-xl px-1 py-1.5 border border-gray-300/30 dark:border-gray-700/50 z-50 bg-white dark:bg-gray-850 dark:text-white shadow-sm"
|
||||
sideOffset={-2}
|
||||
side="bottom"
|
||||
align="start"
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "",
|
||||
"and create a new shared link.": "و أنشئ رابط مشترك جديد.",
|
||||
"Android": "",
|
||||
"API": "",
|
||||
"API Base URL": "API الرابط الرئيسي",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "API مفتاح",
|
||||
"API Key created.": "API تم أنشاء المفتاح",
|
||||
"API Key Endpoint Restrictions": "",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "",
|
||||
"Are you sure you want to delete this message?": "",
|
||||
"Are you sure you want to unarchive all archived chats?": "",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "",
|
||||
"Are you sure?": "هل أنت متأكد ؟",
|
||||
"Arena Models": "",
|
||||
"Artifacts": "",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "أدخل الChunk Overlap",
|
||||
"Enter Chunk Size": "أدخل Chunk الحجم",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "",
|
||||
"Folder name cannot be empty.": "",
|
||||
"Folder name updated successfully": "",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "اتبعت التعليمات على أكمل وجه",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "",
|
||||
"Hex Color - Leave empty for default color": "",
|
||||
"Hide": "أخفاء",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "",
|
||||
"Invalid file format.": "",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "",
|
||||
"Invalid Tag": "تاق غير صالحة",
|
||||
"is typing...": "",
|
||||
"January": "يناير",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "JWT تجريبي",
|
||||
"JWT Token": "JWT Token",
|
||||
"Kagi Search API Key": "",
|
||||
"Keep Alive": "Keep Alive",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "",
|
||||
"Keyboard shortcuts": "اختصارات لوحة المفاتيح",
|
||||
"Knowledge": "",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "كلمة المرور الجديدة",
|
||||
"New Tool": "",
|
||||
"new-channel": "",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "",
|
||||
"OpenAI URL/Key required.": "URL/مفتاح OpenAI.مطلوب عنوان ",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "أو",
|
||||
"Organize your users": "",
|
||||
"Other": "آخر",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "PDF ملف (.pdf)",
|
||||
"PDF Extract Images (OCR)": "PDF أستخرج الصور (OCR)",
|
||||
"pending": "قيد الانتظار",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "{{error}} تم رفض الإذن عند الوصول إلى الميكروفون ",
|
||||
"Permissions": "",
|
||||
"Perplexity API Key": "",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "التخصيص",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "",
|
||||
"Pinned": "",
|
||||
"Pioneer insights": "",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "أخر 30 يوم",
|
||||
"Previous 7 days": "أخر 7 أيام",
|
||||
"Previous message": "",
|
||||
"Private": "",
|
||||
"Profile Image": "صورة الملف الشخصي",
|
||||
"Prompt": "",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "إعادة تسمية",
|
||||
"Reorder Models": "",
|
||||
"Reply in Thread": "",
|
||||
"Request Mode": "وضع الطلب",
|
||||
"Reranking Engine": "",
|
||||
"Reranking Model": "إعادة تقييم النموذج",
|
||||
"Reset": "",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "وهذا يضمن حفظ محادثاتك القيمة بشكل آمن في قاعدة بياناتك الخلفية. شكرًا لك!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This model is not publicly available. Please select another model.": "",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "",
|
||||
"This response was generated by \"{{model}}\"": "",
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "و{{COUNT}} المزيد",
|
||||
"and create a new shared link.": "وإنشاء رابط مشترك جديد.",
|
||||
"Android": "",
|
||||
"API": "",
|
||||
"API Base URL": "الرابط الأساسي لواجهة API",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "مفتاح واجهة برمجة التطبيقات (API)",
|
||||
"API Key created.": "تم إنشاء مفتاح واجهة API.",
|
||||
"API Key Endpoint Restrictions": "قيود نقاط نهاية مفتاح API",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "هل أنت متأكد من رغبتك في حذف هذه القناة؟",
|
||||
"Are you sure you want to delete this message?": "هل أنت متأكد من رغبتك في حذف هذه الرسالة؟",
|
||||
"Are you sure you want to unarchive all archived chats?": "هل أنت متأكد من رغبتك في إلغاء أرشفة جميع المحادثات المؤرشفة؟",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "",
|
||||
"Are you sure?": "هل أنت متأكد؟",
|
||||
"Arena Models": "نماذج الساحة",
|
||||
"Artifacts": "القطع الأثرية",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "أدخل الChunk Overlap",
|
||||
"Enter Chunk Size": "أدخل Chunk الحجم",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "أدخل أزواج \"الرمز:قيمة التحيز\" مفصولة بفواصل (مثال: 5432:100، 413:-100)",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "أدخل الوصف",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "تم حذف المجلد بنجاح",
|
||||
"Folder name cannot be empty.": "لا يمكن أن يكون اسم المجلد فارغًا.",
|
||||
"Folder name updated successfully": "تم تحديث اسم المجلد بنجاح",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "اتبعت التعليمات على أكمل وجه",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "لون سداسي",
|
||||
"Hex Color - Leave empty for default color": "اللون السداسي - اتركه فارغًا لاستخدام اللون الافتراضي",
|
||||
"Hide": "أخفاء",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "الصفحة الرئيسية",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "",
|
||||
"Invalid file format.": "تنسيق ملف غير صالح.",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "",
|
||||
"Invalid Tag": "تاق غير صالحة",
|
||||
"is typing...": "يكتب...",
|
||||
"January": "يناير",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "JWT تجريبي",
|
||||
"JWT Token": "JWT Token",
|
||||
"Kagi Search API Key": "مفتاح API لـ Kagi Search",
|
||||
"Keep Alive": "Keep Alive",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "المفتاح",
|
||||
"Keyboard shortcuts": "اختصارات لوحة المفاتيح",
|
||||
"Knowledge": "المعرفة",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "كلمة المرور الجديدة",
|
||||
"New Tool": "",
|
||||
"new-channel": "قناة جديدة",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "تم تحديث إعدادات OpenAI API",
|
||||
"OpenAI URL/Key required.": "URL/مفتاح OpenAI.مطلوب عنوان ",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "أو",
|
||||
"Organize your users": "تنظيم المستخدمين الخاصين بك",
|
||||
"Other": "آخر",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "PDF ملف (.pdf)",
|
||||
"PDF Extract Images (OCR)": "PDF أستخرج الصور (OCR)",
|
||||
"pending": "قيد الانتظار",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "تم رفض الإذن عند محاولة الوصول إلى أجهزة الوسائط",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "{{error}} تم رفض الإذن عند الوصول إلى الميكروفون ",
|
||||
"Permissions": "الأذونات",
|
||||
"Perplexity API Key": "مفتاح API لـ Perplexity",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "التخصيص",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "تثبيت",
|
||||
"Pinned": "مثبت",
|
||||
"Pioneer insights": "رؤى رائدة",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "أخر 30 يوم",
|
||||
"Previous 7 days": "أخر 7 أيام",
|
||||
"Previous message": "",
|
||||
"Private": "",
|
||||
"Profile Image": "صورة الملف الشخصي",
|
||||
"Prompt": "التوجيه",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "إعادة تسمية",
|
||||
"Reorder Models": "إعادة ترتيب النماذج",
|
||||
"Reply in Thread": "الرد داخل سلسلة الرسائل",
|
||||
"Request Mode": "وضع الطلب",
|
||||
"Reranking Engine": "",
|
||||
"Reranking Model": "إعادة تقييم النموذج",
|
||||
"Reset": "إعادة تعيين",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "يحدد حجم الدفعة عدد طلبات النصوص التي تتم معالجتها معًا. الحجم الأكبر يمكن أن يزيد الأداء والسرعة، ولكنه يحتاج أيضًا إلى ذاكرة أكبر.",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "المطورون خلف هذا المكون الإضافي هم متطوعون شغوفون من المجتمع. إذا وجدت هذا المكون مفيدًا، فكر في المساهمة في تطويره.",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "قائمة التقييم تعتمد على نظام Elo ويتم تحديثها في الوقت الفعلي.",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "السمة LDAP التي تتوافق مع البريد الإلكتروني الذي يستخدمه المستخدمون لتسجيل الدخول.",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "السمة LDAP التي تتوافق مع اسم المستخدم الذي يستخدمه المستخدمون لتسجيل الدخول.",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "وهذا يضمن حفظ محادثاتك القيمة بشكل آمن في قاعدة بياناتك الخلفية. شكرًا لك!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "هذه ميزة تجريبية، وقد لا تعمل كما هو متوقع وقد تتغير في أي وقت.",
|
||||
"This model is not publicly available. Please select another model.": "",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "هذا الخيار يحدد عدد الرموز التي يتم الاحتفاظ بها عند تحديث السياق. مثلاً، إذا تم ضبطه على 2، سيتم الاحتفاظ بآخر رمزين من السياق. الحفاظ على السياق يساعد في استمرارية المحادثة، لكنه قد يحد من التفاعل مع مواضيع جديدة.",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "يحدد هذا الخيار الحد الأقصى لعدد الرموز التي يمكن للنموذج توليدها في الرد. زيادته تتيح للنموذج تقديم إجابات أطول، لكنها قد تزيد من احتمالية توليد محتوى غير مفيد أو غير ذي صلة.",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "سيؤدي هذا الخيار إلى حذف جميع الملفات الحالية في المجموعة واستبدالها بالملفات التي تم تحميلها حديثًا.",
|
||||
"This response was generated by \"{{model}}\"": "تم توليد هذا الرد بواسطة \"{{model}}\"",
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "и още {{COUNT}}",
|
||||
"and create a new shared link.": "и създай нов общ линк.",
|
||||
"Android": "",
|
||||
"API": "",
|
||||
"API Base URL": "API Базов URL",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "API Ключ",
|
||||
"API Key created.": "API Ключ създаден.",
|
||||
"API Key Endpoint Restrictions": "Ограничения на крайните точки за API Ключ",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "Сигурни ли сте, че искате да изтриете този канал?",
|
||||
"Are you sure you want to delete this message?": "Сигурни ли сте, че искате да изтриете това съобщение?",
|
||||
"Are you sure you want to unarchive all archived chats?": "Сигурни ли сте, че искате да разархивирате всички архивирани чатове?",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "",
|
||||
"Are you sure?": "Сигурни ли сте?",
|
||||
"Arena Models": "Арена Модели",
|
||||
"Artifacts": "Артефакти",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "Въведете припокриване на чънкове",
|
||||
"Enter Chunk Size": "Въведете размер на чънк",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "Въведете описание",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "Папката е изтрита успешно",
|
||||
"Folder name cannot be empty.": "Името на папката не може да бъде празно.",
|
||||
"Folder name updated successfully": "Името на папката е актуализирано успешно",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "Следвайте инструкциите перфектно",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "Hex цвят",
|
||||
"Hex Color - Leave empty for default color": "Hex цвят - Оставете празно за цвят по подразбиране",
|
||||
"Hide": "Скрий",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "Начало",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "",
|
||||
"Invalid file format.": "Невалиден формат на файла.",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "",
|
||||
"Invalid Tag": "Невалиден таг",
|
||||
"is typing...": "пише...",
|
||||
"January": "Януари",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "JWT изтичане",
|
||||
"JWT Token": "JWT токен",
|
||||
"Kagi Search API Key": "API ключ за Kagi Search",
|
||||
"Keep Alive": "Поддържай активен",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "Ключ",
|
||||
"Keyboard shortcuts": "Клавиши за бърз достъп",
|
||||
"Knowledge": "Знания",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "Нова парола",
|
||||
"New Tool": "",
|
||||
"new-channel": "нов-канал",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "Без съдържание",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "Настройките на OpenAI API са актуализирани",
|
||||
"OpenAI URL/Key required.": "OpenAI URL/Key е задължителен.",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "или",
|
||||
"Organize your users": "Организирайте вашите потребители",
|
||||
"Other": "Друго",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "PDF документ (.pdf)",
|
||||
"PDF Extract Images (OCR)": "Извличане на изображения от PDF (OCR)",
|
||||
"pending": "в очакване",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "Отказан достъп при опит за достъп до медийни устройства",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "Отказан достъп при опит за достъп до микрофона: {{error}}",
|
||||
"Permissions": "Разрешения",
|
||||
"Perplexity API Key": "",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "Персонализация",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "Закачи",
|
||||
"Pinned": "Закачено",
|
||||
"Pioneer insights": "Пионерски прозрения",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "Предишните 30 дни",
|
||||
"Previous 7 days": "Предишните 7 дни",
|
||||
"Previous message": "",
|
||||
"Private": "",
|
||||
"Profile Image": "Профилна снимка",
|
||||
"Prompt": "Промпт",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "Преименуване",
|
||||
"Reorder Models": "Преорганизиране на моделите",
|
||||
"Reply in Thread": "Отговори в тред",
|
||||
"Request Mode": "Режим на заявка",
|
||||
"Reranking Engine": "Двигател за пренареждане",
|
||||
"Reranking Model": "Модел за преподреждане",
|
||||
"Reset": "Нулиране",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "Разработчиците зад този плъгин са страстни доброволци от общността. Ако намирате този плъгин полезен, моля, обмислете да допринесете за неговото развитие.",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "Класацията за оценка се базира на рейтинговата система Elo и се обновява в реално време.",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "LDAP атрибутът, който съответства на имейла, който потребителите използват за вписване.",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "LDAP атрибутът, който съответства на потребителското име, което потребителите използват за вписване.",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Това гарантира, че ценните ви разговори се запазват сигурно във вашата бекенд база данни. Благодарим ви!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Това е експериментална функция, може да не работи според очакванията и подлежи на промяна по всяко време.",
|
||||
"This model is not publicly available. Please select another model.": "",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "Тази опция ще изтрие всички съществуващи файлове в колекцията и ще ги замени с новокачени файлове.",
|
||||
"This response was generated by \"{{model}}\"": "Този отговор беше генериран от \"{{model}}\"",
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "",
|
||||
"and create a new shared link.": "এবং একটি নতুন শেয়ারে লিংক তৈরি করুন.",
|
||||
"Android": "",
|
||||
"API": "",
|
||||
"API Base URL": "এপিআই বেজ ইউআরএল",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "এপিআই কোড",
|
||||
"API Key created.": "একটি এপিআই কোড তৈরি করা হয়েছে.",
|
||||
"API Key Endpoint Restrictions": "",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "",
|
||||
"Are you sure you want to delete this message?": "",
|
||||
"Are you sure you want to unarchive all archived chats?": "",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "",
|
||||
"Are you sure?": "আপনি নিশ্চিত?",
|
||||
"Arena Models": "",
|
||||
"Artifacts": "",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "চাঙ্ক ওভারল্যাপ লিখুন",
|
||||
"Enter Chunk Size": "চাংক সাইজ লিখুন",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "",
|
||||
"Folder name cannot be empty.": "",
|
||||
"Folder name updated successfully": "",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "নির্দেশাবলী নিখুঁতভাবে অনুসরণ করা হয়েছে",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "",
|
||||
"Hex Color - Leave empty for default color": "",
|
||||
"Hide": "লুকান",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "",
|
||||
"Invalid file format.": "",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "",
|
||||
"Invalid Tag": "অবৈধ ট্যাগ",
|
||||
"is typing...": "",
|
||||
"January": "জানুয়ারী",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "JWT-র মেয়াদ",
|
||||
"JWT Token": "JWT টোকেন",
|
||||
"Kagi Search API Key": "",
|
||||
"Keep Alive": "সচল রাখুন",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "",
|
||||
"Keyboard shortcuts": "কিবোর্ড শর্টকাটসমূহ",
|
||||
"Knowledge": "",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "নতুন পাসওয়ার্ড",
|
||||
"New Tool": "",
|
||||
"new-channel": "",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "",
|
||||
"OpenAI URL/Key required.": "OpenAI URL/Key আবশ্যক",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "অথবা",
|
||||
"Organize your users": "",
|
||||
"Other": "অন্যান্য",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "PDF ডকুমেন্ট (.pdf)",
|
||||
"PDF Extract Images (OCR)": "পিডিএফ এর ছবি থেকে লেখা বের করুন (OCR)",
|
||||
"pending": "অপেক্ষমান",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "মাইক্রোফোন ব্যবহারের অনুমতি পাওয়া যায়নি: {{error}}",
|
||||
"Permissions": "",
|
||||
"Perplexity API Key": "",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "ডিজিটাল বাংলা",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "",
|
||||
"Pinned": "",
|
||||
"Pioneer insights": "",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "পূর্ব ৩০ দিন",
|
||||
"Previous 7 days": "পূর্ব ৭ দিন",
|
||||
"Previous message": "",
|
||||
"Private": "",
|
||||
"Profile Image": "প্রোফাইল ইমেজ",
|
||||
"Prompt": "",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "রেনেম",
|
||||
"Reorder Models": "",
|
||||
"Reply in Thread": "",
|
||||
"Request Mode": "রিকোয়েস্ট মোড",
|
||||
"Reranking Engine": "",
|
||||
"Reranking Model": "রির্যাক্টিং মডেল",
|
||||
"Reset": "",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "এটা নিশ্চিত করে যে, আপনার গুরুত্বপূর্ণ আলোচনা নিরাপদে আপনার ব্যাকএন্ড ডেটাবেজে সংরক্ষিত আছে। ধন্যবাদ!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This model is not publicly available. Please select another model.": "",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "",
|
||||
"This response was generated by \"{{model}}\"": "",
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "ད་དུང་ {{COUNT}}",
|
||||
"and create a new shared link.": "དང་མཉམ་སྤྱོད་སྦྲེལ་ཐག་གསར་པ་ཞིག་བཟོ་བ།",
|
||||
"Android": "",
|
||||
"API": "",
|
||||
"API Base URL": "API གཞི་རྩའི་ URL",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "API ལྡེ་མིག",
|
||||
"API Key created.": "API ལྡེ་མིག་བཟོས་ཟིན།",
|
||||
"API Key Endpoint Restrictions": "API ལྡེ་མིག་མཇུག་མཐུད་ཚད་བཀག",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "ཁྱེད་ཀྱིས་བགྲོ་གླེང་འདི་བསུབ་འདོད་ངེས་ཡིན་ནམ།",
|
||||
"Are you sure you want to delete this message?": "འཕྲིན་འདི་བསུབ་འདོད་ངེས་ཡིན་ནམ།",
|
||||
"Are you sure you want to unarchive all archived chats?": "ཁྱེད་ཀྱིས་ཡིག་མཛོད་དུ་བཞག་པའི་ཁ་བརྡ་ཡོངས་རྫོགས་ཕྱིར་འདོན་འདོད་ངེས་ཡིན་ནམ།",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "",
|
||||
"Are you sure?": "ཁྱོད་ངེས་པ་ཡིན་ནམ།",
|
||||
"Arena Models": "Arena དཔེ་དབྱིབས།",
|
||||
"Artifacts": "རྫས་རྟེན།",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "དུམ་བུ་བསྣོལ་བ་འཇུག་པ།",
|
||||
"Enter Chunk Size": "དུམ་བུའི་ཆེ་ཆུང་འཇུག་པ།",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "ཚེག་བསྐུངས་ཀྱིས་ལོགས་སུ་བཀར་བའི་ \"ཊོཀ་ཀེན།:ཕྱོགས་ཞེན་རིན་ཐང་།\" ཆ་འཇུག་པ། (དཔེར། 5432:100, 413:-100)",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "འགྲེལ་བཤད་འཇུག་པ།",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "ཡིག་སྣོད་ལེགས་པར་བསུབས་ཟིན།",
|
||||
"Folder name cannot be empty.": "ཡིག་སྣོད་ཀྱི་མིང་སྟོང་པ་ཡིན་མི་ཆོག",
|
||||
"Folder name updated successfully": "ཡིག་སྣོད་ཀྱི་མིང་ལེགས་པར་གསར་སྒྱུར་བྱས་ཟིན།",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "ལམ་སྟོན་ཡང་དག་པར་བསྒྲུབས།",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "Hex ཚོན་མདོག",
|
||||
"Hex Color - Leave empty for default color": "Hex ཚོན་མདོག - སྔོན་སྒྲིག་ཚོན་མདོག་གི་ཆེད་དུ་སྟོང་པ་བཞག་པ།",
|
||||
"Hide": "སྦ་བ།",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "དཔེ་དབྱིབས་སྦ་བ།",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "གཙོ་ངོས།",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "",
|
||||
"Invalid file format.": "ཡིག་ཆའི་བཀོད་པ་ནུས་མེད།",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "JSON schema ནུས་མེད།",
|
||||
"Invalid Tag": "རྟགས་ནུས་མེད།",
|
||||
"is typing...": "ཡིག་འབྲུ་རྒྱག་བཞིན་པ།...",
|
||||
"January": "ཟླ་བ་དང་པོ།",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "JWT དུས་ཚོད་རྫོགས་པ།",
|
||||
"JWT Token": "JWT Token",
|
||||
"Kagi Search API Key": "Kagi Search API ལྡེ་མིག",
|
||||
"Keep Alive": "གསོན་པོར་གནས་པ།",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "ལྡེ་མིག",
|
||||
"Keyboard shortcuts": "མཐེབ་གནོན་མྱུར་ལམ།",
|
||||
"Knowledge": "ཤེས་བྱ།",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "གསང་གྲངས་གསར་པ།",
|
||||
"New Tool": "",
|
||||
"new-channel": "བགྲོ་གླེང་གསར་པ།",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "OpenAI API སྒྲིག་འགོད་གསར་སྒྱུར་བྱས།",
|
||||
"OpenAI URL/Key required.": "OpenAI URL/ལྡེ་མིག་དགོས་ངེས།",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "ཡང་ན།",
|
||||
"Organize your users": "ཁྱེད་ཀྱི་བེད་སྤྱོད་མཁན་སྒྲིག་འཛུགས།",
|
||||
"Other": "གཞན།",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "PDF ཡིག་ཆ། (.pdf)",
|
||||
"PDF Extract Images (OCR)": "PDF པར་འདོན་སྤེལ། (OCR)",
|
||||
"pending": "སྒུག་བཞིན་པ།",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "བརྒྱུད་ལམ་སྒྲིག་ཆས་འཛུལ་སྤྱོད་སྐབས་དབང་ཚད་ཁས་མ་བླངས།",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "སྐད་སྒྲ་འཛིན་ཆས་འཛུལ་སྤྱོད་སྐབས་དབང་ཚད་ཁས་མ་བླངས།: {{error}}",
|
||||
"Permissions": "དབང་ཚད།",
|
||||
"Perplexity API Key": "Perplexity API ལྡེ་མིག",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "སྒེར་སྤྱོད་ཅན།",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "གདབ་པ།",
|
||||
"Pinned": "གདབ་ཟིན།",
|
||||
"Pioneer insights": "སྔོན་དཔག་རིག་ནུས།",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "ཉིན་ ༣༠ སྔོན་མ།",
|
||||
"Previous 7 days": "ཉིན་ ༧ སྔོན་མ།",
|
||||
"Previous message": "",
|
||||
"Private": "སྒེར།",
|
||||
"Profile Image": "སྤྱི་ཐག་པར།",
|
||||
"Prompt": "འགུལ་སློང་།",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "མིང་བསྐྱར་འདོགས།",
|
||||
"Reorder Models": "དཔེ་དབྱིབས་བསྐྱར་སྒྲིག",
|
||||
"Reply in Thread": "བརྗོད་གཞིའི་ནང་ལན་འདེབས།",
|
||||
"Request Mode": "རེ་ཞུའི་མ་དཔེ།",
|
||||
"Reranking Engine": "",
|
||||
"Reranking Model": "བསྐྱར་སྒྲིག་དཔེ་དབྱིབས།",
|
||||
"Reset": "སླར་སྒྲིག",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "ཚན་ཆུང་གི་ཆེ་ཆུང་གིས་ཡིག་རྐྱང་རེ་ཞུ་ག་ཚོད་མཉམ་དུ་ཐེངས་གཅིག་ལ་སྒྲུབ་དགོས་གཏན་འཁེལ་བྱེད། ཚན་ཆུང་ཆེ་བ་ཡིས་དཔེ་དབྱིབས་ཀྱི་ལས་ཆོད་དང་མྱུར་ཚད་མང་དུ་གཏོང་ཐུབ། འོན་ཀྱང་དེས་དྲན་ཤེས་མང་བ་དགོས།",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "plugin འདིའི་རྒྱབ་ཀྱི་གསར་སྤེལ་བ་དག་ནི་སྤྱི་ཚོགས་ནས་ཡིན་པའི་སེམས་ཤུགས་ཅན་གྱི་དང་བླངས་པ་ཡིན། གལ་ཏེ་ཁྱེད་ཀྱིས་ plugin འདི་ཕན་ཐོགས་ཡོད་པ་མཐོང་ན། དེའི་གསར་སྤེལ་ལ་ཞལ་འདེབས་གནང་བར་བསམ་ཞིབ་གནང་རོགས།",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "གདེང་འཇོག་འགྲན་རེས་རེའུ་མིག་དེ་ Elo སྐར་མ་སྤྲོད་པའི་མ་ལག་ལ་གཞི་བཅོལ་ཡོད། དེ་མིན་དུས་ཐོག་ཏུ་གསར་སྒྱུར་བྱེད་ཀྱི་ཡོད།",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "བེད་སྤྱོད་མཁན་ཚོས་ནང་འཛུལ་བྱེད་སྐབས་བེད་སྤྱོད་གཏོང་བའི་ཡིག་ཟམ་ལ་སྦྲེལ་བའི་ LDAP ཁྱད་ཆོས།",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "བེད་སྤྱོད་མཁན་ཚོས་ནང་འཛུལ་བྱེད་སྐབས་བེད་སྤྱོད་གཏོང་བའི་བེད་སྤྱོད་མིང་ལ་སྦྲེལ་བའི་ LDAP ཁྱད་ཆོས།",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "འདིས་ཁྱེད་ཀྱི་རྩ་ཆེའི་ཁ་བརྡ་དག་བདེ་འཇགས་ངང་ཁྱེད་ཀྱི་རྒྱབ་སྣེ་གནས་ཚུལ་མཛོད་དུ་ཉར་ཚགས་བྱེད་པ་ཁག་ཐེག་བྱེད། ཐུགས་རྗེ་ཆེ།",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "འདི་ནི་ཚོད་ལྟའི་རང་བཞིན་གྱི་ཁྱད་ཆོས་ཤིག་ཡིན། དེ་རེ་སྒུག་ལྟར་ལས་ཀ་བྱེད་མི་སྲིད། དེ་མིན་དུས་ཚོད་གང་རུང་ལ་འགྱུར་བ་འགྲོ་སྲིད།",
|
||||
"This model is not publicly available. Please select another model.": "",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "འདེམས་ཀ་འདིས་ནང་དོན་གསར་སྒྱུར་བྱེད་སྐབས་ཊོཀ་ཀེན་ག་ཚོད་ཉར་ཚགས་བྱེད་དགོས་ཚོད་འཛིན་བྱེད། དཔེར་ན། གལ་ཏེ་ ༢ ལ་བཀོད་སྒྲིག་བྱས་ན། ཁ་བརྡའི་ནང་དོན་གྱི་ཊོཀ་ཀེན་མཐའ་མ་ ༢ ཉར་ཚགས་བྱེད་ངེས། ནང་དོན་ཉར་ཚགས་བྱས་ན་ཁ་བརྡའི་རྒྱུན་མཐུད་རང་བཞིན་རྒྱུན་སྲུང་བྱེད་པར་རོགས་པ་བྱེད་ཐུབ། འོན་ཀྱང་དེས་བརྗོད་གཞི་གསར་པར་ལན་འདེབས་བྱེད་པའི་ནུས་པ་ཉུང་དུ་གཏོང་སྲིད།",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "འདེམས་ཀ་འདིས་དཔེ་དབྱིབས་ཀྱིས་དེའི་ལན་ནང་བཟོ་ཐུབ་པའི་ཊོཀ་ཀེན་གྱི་གྲངས་མང་ཤོས་འཇོག་པ། ཚད་བཀག་འདི་མང་དུ་བཏང་ན་དཔེ་དབྱིབས་ཀྱིས་ལན་རིང་བ་སྤྲོད་པར་གནང་བ་སྤྲོད། འོན་ཀྱང་དེས་ཕན་ཐོགས་མེད་པའམ་འབྲེལ་མེད་ཀྱི་ནང་དོན་བཟོ་བའི་ཆགས་ཚུལ་མང་དུ་གཏོང་སྲིད།",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "འདེམས་ཀ་འདིས་བསྡུ་གསོག་ནང་གི་ཡོད་པའི་ཡིག་ཆ་ཡོངས་རྫོགས་བསུབ་ནས་དེ་དག་གསར་དུ་སྤར་བའི་ཡིག་ཆས་ཚབ་བྱེད་ངེས།",
|
||||
"This response was generated by \"{{model}}\"": "ལན་འདི་ \"{{model}}\" ཡིས་བཟོས་པ།",
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "i {{COUNT}} més",
|
||||
"and create a new shared link.": "i crear un nou enllaç compartit.",
|
||||
"Android": "Android",
|
||||
"API": "",
|
||||
"API Base URL": "URL Base de l'API",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "clau API",
|
||||
"API Key created.": "clau API creada.",
|
||||
"API Key Endpoint Restrictions": "Restriccions del punt d'accés de la Clau API",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "Estàs segur que vols eliminar aquest canal?",
|
||||
"Are you sure you want to delete this message?": "Estàs segur que vols eliminar aquest missatge?",
|
||||
"Are you sure you want to unarchive all archived chats?": "Estàs segur que vols desarxivar tots els xats arxivats?",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "Estàs segur que vols actualitzar el rol de l'usuari a **{{ROLE}}**?",
|
||||
"Are you sure?": "Estàs segur?",
|
||||
"Arena Models": "Models de l'Arena",
|
||||
"Artifacts": "Artefactes",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "Introdueix la mida de solapament de blocs",
|
||||
"Enter Chunk Size": "Introdueix la mida del bloc",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "Introdueix parelles de \"token:valor de biaix\" separats per comes (exemple: 5432:100, 413:-100)",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "Introdueix la descripció",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "Carpeta eliminada correctament",
|
||||
"Folder name cannot be empty.": "El nom de la carpeta no pot ser buit.",
|
||||
"Folder name updated successfully": "Nom de la carpeta actualitzat correctament",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "S'han seguit les instruccions perfectament",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "Color hexadecimal",
|
||||
"Hex Color - Leave empty for default color": "Color hexadecimal - Deixar buit per a color per defecte",
|
||||
"Hide": "Amaga",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "Amagar el model",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "Inici",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "Continguts del fitxer no vàlids",
|
||||
"Invalid file format.": "Format d'arxiu no vàlid.",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "Esquema JSON no vàlid",
|
||||
"Invalid Tag": "Etiqueta no vàlida",
|
||||
"is typing...": "està escrivint...",
|
||||
"January": "Gener",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "Caducitat del JWT",
|
||||
"JWT Token": "Token JWT",
|
||||
"Kagi Search API Key": "Clau API de Kagi Search",
|
||||
"Keep Alive": "Manté actiu",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "Clau",
|
||||
"Keyboard shortcuts": "Dreceres de teclat",
|
||||
"Knowledge": "Coneixement",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "Nova contrasenya",
|
||||
"New Tool": "",
|
||||
"new-channel": "nou-canal",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "No hi ha contingut",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "Configuració de l'API d'OpenAI actualitzada",
|
||||
"OpenAI URL/Key required.": "URL/Clau d'OpenAI requerides.",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "o",
|
||||
"Organize your users": "Organitza els teus usuaris",
|
||||
"Other": "Altres",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "Document PDF (.pdf)",
|
||||
"PDF Extract Images (OCR)": "Extreu imatges del PDF (OCR)",
|
||||
"pending": "pendent",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "Permís denegat en accedir a dispositius multimèdia",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "Permís denegat en accedir al micròfon: {{error}}",
|
||||
"Permissions": "Permisos",
|
||||
"Perplexity API Key": "Clau API de Perplexity",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "Personalització",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "Fixar",
|
||||
"Pinned": "Fixat",
|
||||
"Pioneer insights": "Perspectives pioneres",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "30 dies anteriors",
|
||||
"Previous 7 days": "7 dies anteriors",
|
||||
"Previous message": "",
|
||||
"Private": "Privat",
|
||||
"Profile Image": "Imatge de perfil",
|
||||
"Prompt": "Indicació",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "Canviar el nom",
|
||||
"Reorder Models": "Reordenar els models",
|
||||
"Reply in Thread": "Respondre al fil",
|
||||
"Request Mode": "Mode de sol·licitud",
|
||||
"Reranking Engine": "Motor de valoració",
|
||||
"Reranking Model": "Model de reavaluació",
|
||||
"Reset": "Restableix",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "La mida del lot determina quantes sol·licituds de text es processen alhora. Una mida de lot més gran pot augmentar el rendiment i la velocitat del model, però també requereix més memòria.",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "Els desenvolupadors d'aquest complement són voluntaris apassionats de la comunitat. Si trobeu útil aquest complement, considereu contribuir al seu desenvolupament.",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "La classificació d'avaluació es basa en el sistema de qualificació Elo i s'actualitza en temps real.",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "L'atribut LDAP que s'associa al correu que els usuaris utilitzen per iniciar la sessió.",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "L'atribut LDAP que mapeja el nom d'usuari amb l'usuari que vol iniciar sessió",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Això assegura que les teves converses valuoses queden desades de manera segura a la teva base de dades. Gràcies!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Aquesta és una funció experimental, és possible que no funcioni com s'espera i està subjecta a canvis en qualsevol moment.",
|
||||
"This model is not publicly available. Please select another model.": "Aquest model no està disponible públicament. Seleccioneu-ne un altre.",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "Aquesta opció controla quants tokens es conserven en actualitzar el context. Per exemple, si s'estableix en 2, es conservaran els darrers 2 tokens del context de conversa. Preservar el context pot ajudar a mantenir la continuïtat d'una conversa, però pot reduir la capacitat de respondre a nous temes.",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "Aquesta opció estableix el nombre màxim de tokens que el model pot generar en la seva resposta. Augmentar aquest límit permet que el model proporcioni respostes més llargues, però també pot augmentar la probabilitat que es generi contingut poc útil o irrellevant.",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "Aquesta opció eliminarà tots els fitxers existents de la col·lecció i els substituirà per fitxers recentment penjats.",
|
||||
"This response was generated by \"{{model}}\"": "Aquesta resposta l'ha generat el model \"{{model}}\"",
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "",
|
||||
"and create a new shared link.": "",
|
||||
"Android": "",
|
||||
"API": "",
|
||||
"API Base URL": "API Base URL",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "yawe sa API",
|
||||
"API Key created.": "",
|
||||
"API Key Endpoint Restrictions": "",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "",
|
||||
"Are you sure you want to delete this message?": "",
|
||||
"Are you sure you want to unarchive all archived chats?": "",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "",
|
||||
"Are you sure?": "Sigurado ka ?",
|
||||
"Arena Models": "",
|
||||
"Artifacts": "",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "Pagsulod sa block overlap",
|
||||
"Enter Chunk Size": "Isulod ang block size",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "",
|
||||
"Folder name cannot be empty.": "",
|
||||
"Folder name updated successfully": "",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "",
|
||||
"Hex Color - Leave empty for default color": "",
|
||||
"Hide": "Tagoa",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "",
|
||||
"Invalid file format.": "",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "",
|
||||
"Invalid Tag": "",
|
||||
"is typing...": "",
|
||||
"January": "",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "Pag-expire sa JWT",
|
||||
"JWT Token": "JWT token",
|
||||
"Kagi Search API Key": "",
|
||||
"Keep Alive": "Padayon nga aktibo",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "",
|
||||
"Keyboard shortcuts": "Mga shortcut sa keyboard",
|
||||
"Knowledge": "",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "Bag-ong Password",
|
||||
"New Tool": "",
|
||||
"new-channel": "",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "",
|
||||
"OpenAI URL/Key required.": "",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "O",
|
||||
"Organize your users": "",
|
||||
"Other": "",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "",
|
||||
"PDF Extract Images (OCR)": "PDF Image Extraction (OCR)",
|
||||
"pending": "gipugngan",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "Gidili ang pagtugot sa dihang nag-access sa mikropono: {{error}}",
|
||||
"Permissions": "",
|
||||
"Perplexity API Key": "",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "",
|
||||
"Pinned": "",
|
||||
"Pioneer insights": "",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "",
|
||||
"Previous 7 days": "",
|
||||
"Previous message": "",
|
||||
"Private": "",
|
||||
"Profile Image": "",
|
||||
"Prompt": "",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "",
|
||||
"Reorder Models": "",
|
||||
"Reply in Thread": "",
|
||||
"Request Mode": "Query mode",
|
||||
"Reranking Engine": "",
|
||||
"Reranking Model": "",
|
||||
"Reset": "",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "Kini nagsiguro nga ang imong bililhon nga mga panag-istoryahanay luwas nga natipig sa imong backend database. ",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "",
|
||||
"This model is not publicly available. Please select another model.": "",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "",
|
||||
"This response was generated by \"{{model}}\"": "",
|
||||
|
|
|
|||
|
|
@ -90,7 +90,9 @@
|
|||
"and {{COUNT}} more": "a {{COUNT}} další/ch",
|
||||
"and create a new shared link.": "a vytvořit nový sdílený odkaz.",
|
||||
"Android": "",
|
||||
"API": "",
|
||||
"API Base URL": "Základní URL adresa API",
|
||||
"API details for using a vision-language model in the picture description. This parameter is mutually exclusive with picture_description_local.": "",
|
||||
"API Key": "Klíč API",
|
||||
"API Key created.": "API klíč byl vytvořen.",
|
||||
"API Key Endpoint Restrictions": "",
|
||||
|
|
@ -108,7 +110,6 @@
|
|||
"Are you sure you want to delete this channel?": "",
|
||||
"Are you sure you want to delete this message?": "",
|
||||
"Are you sure you want to unarchive all archived chats?": "",
|
||||
"Are you sure you want to update this user's role to **{{ROLE}}**?": "",
|
||||
"Are you sure?": "Jste si jistý?",
|
||||
"Arena Models": "Arena modely",
|
||||
"Artifacts": "Artefakty",
|
||||
|
|
@ -443,6 +444,7 @@
|
|||
"Enter Chunk Overlap": "Zadejte překryv části",
|
||||
"Enter Chunk Size": "Zadejte velikost bloku",
|
||||
"Enter comma-separated \"token:bias_value\" pairs (example: 5432:100, 413:-100)": "",
|
||||
"Enter Config in JSON format": "",
|
||||
"Enter content for the pending user info overlay. Leave empty for default.": "",
|
||||
"Enter Datalab Marker API Key": "",
|
||||
"Enter description": "Zadejte popis",
|
||||
|
|
@ -611,6 +613,10 @@
|
|||
"Folder deleted successfully": "Složka byla úspěšně smazána",
|
||||
"Folder name cannot be empty.": "Název složky nesmí být prázdný.",
|
||||
"Folder name updated successfully": "Název složky byl úspěšně aktualizován.",
|
||||
"Follow up": "",
|
||||
"Follow Up Generation": "",
|
||||
"Follow Up Generation Prompt": "",
|
||||
"Follow-Up Auto-Generation": "",
|
||||
"Followed instructions perfectly": "Dodržel pokyny dokonale.",
|
||||
"Force OCR": "",
|
||||
"Force OCR on all pages of the PDF. This can lead to worse results if you have good text in your PDFs. Defaults to False.": "",
|
||||
|
|
@ -663,6 +669,7 @@
|
|||
"Hex Color": "",
|
||||
"Hex Color - Leave empty for default color": "",
|
||||
"Hide": "Schovej",
|
||||
"Hide from Sidebar": "",
|
||||
"Hide Model": "",
|
||||
"High Contrast Mode": "",
|
||||
"Home": "",
|
||||
|
|
@ -711,7 +718,6 @@
|
|||
"Invalid file content": "",
|
||||
"Invalid file format.": "Neplatný formát souboru.",
|
||||
"Invalid JSON file": "",
|
||||
"Invalid JSON schema": "",
|
||||
"Invalid Tag": "Neplatný tag",
|
||||
"is typing...": "",
|
||||
"January": "Leden",
|
||||
|
|
@ -726,7 +732,7 @@
|
|||
"JWT Expiration": "Vypršení JWT",
|
||||
"JWT Token": "JWT Token (JSON Web Token)",
|
||||
"Kagi Search API Key": "",
|
||||
"Keep Alive": "Udržovat spojení",
|
||||
"Keep in Sidebar": "",
|
||||
"Key": "",
|
||||
"Keyboard shortcuts": "Klávesové zkratky",
|
||||
"Knowledge": "Znalosti",
|
||||
|
|
@ -846,6 +852,7 @@
|
|||
"New Password": "Nové heslo",
|
||||
"New Tool": "",
|
||||
"new-channel": "",
|
||||
"Next message": "",
|
||||
"No chats found for this user.": "",
|
||||
"No chats found.": "",
|
||||
"No content": "",
|
||||
|
|
@ -913,6 +920,7 @@
|
|||
"OpenAI API settings updated": "",
|
||||
"OpenAI URL/Key required.": "Je vyžadován odkaz/adresa URL nebo klíč OpenAI.",
|
||||
"openapi.json URL or Path": "",
|
||||
"Options for running a local vision-language model in the picture description. The parameters refer to a model hosted on Hugging Face. This parameter is mutually exclusive with picture_description_api.": "",
|
||||
"or": "nebo",
|
||||
"Organize your users": "",
|
||||
"Other": "Jiné",
|
||||
|
|
@ -928,6 +936,7 @@
|
|||
"PDF document (.pdf)": "PDF dokument (.pdf)",
|
||||
"PDF Extract Images (OCR)": "Extrahování obrázků z PDF (OCR)",
|
||||
"pending": "čeká na vyřízení",
|
||||
"Pending": "",
|
||||
"Pending User Overlay Content": "",
|
||||
"Pending User Overlay Title": "",
|
||||
"Permission denied when accessing media devices": "Odmítnutí povolení při přístupu k mediálním zařízením",
|
||||
|
|
@ -935,7 +944,12 @@
|
|||
"Permission denied when accessing microphone: {{error}}": "Oprávnění zamítnuto při přístupu k mikrofonu: {{error}}",
|
||||
"Permissions": "",
|
||||
"Perplexity API Key": "",
|
||||
"Perplexity Model": "",
|
||||
"Perplexity Search Context Usage": "",
|
||||
"Personalization": "Personalizace",
|
||||
"Picture Description API Config": "",
|
||||
"Picture Description Local Config": "",
|
||||
"Picture Description Mode": "",
|
||||
"Pin": "",
|
||||
"Pinned": "",
|
||||
"Pioneer insights": "",
|
||||
|
|
@ -965,6 +979,7 @@
|
|||
"Preview": "",
|
||||
"Previous 30 days": "Předchozích 30 dnů",
|
||||
"Previous 7 days": "Předchozích 7 dní",
|
||||
"Previous message": "",
|
||||
"Private": "",
|
||||
"Profile Image": "Profilový obrázek",
|
||||
"Prompt": "",
|
||||
|
|
@ -1006,7 +1021,6 @@
|
|||
"Rename": "Přejmenovat",
|
||||
"Reorder Models": "",
|
||||
"Reply in Thread": "",
|
||||
"Request Mode": "Režim žádosti",
|
||||
"Reranking Engine": "",
|
||||
"Reranking Model": "Model pro přehodnocení pořadí",
|
||||
"Reset": "režim Reset",
|
||||
|
|
@ -1178,6 +1192,7 @@
|
|||
"The batch size determines how many text requests are processed together at once. A higher batch size can increase the performance and speed of the model, but it also requires more memory.": "",
|
||||
"The developers behind this plugin are passionate volunteers from the community. If you find this plugin helpful, please consider contributing to its development.": "Vývojáři stojící za tímto pluginem jsou zapálení dobrovolníci z komunity. Pokud považujete tento plugin za užitečný, zvažte příspěvek k jeho vývoji.",
|
||||
"The evaluation leaderboard is based on the Elo rating system and is updated in real-time.": "Hodnotící žebříček je založen na systému hodnocení Elo a je aktualizován v reálném čase.",
|
||||
"The format to return a response in. Format can be json or a JSON schema.": "",
|
||||
"The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. Leave blank to automatically detect the language.": "",
|
||||
"The LDAP attribute that maps to the mail that users use to sign in.": "",
|
||||
"The LDAP attribute that maps to the username that users use to sign in.": "",
|
||||
|
|
@ -1195,7 +1210,9 @@
|
|||
"This ensures that your valuable conversations are securely saved to your backend database. Thank you!": "To zajišťuje, že vaše cenné konverzace jsou bezpečně uloženy ve vaší backendové databázi. Děkujeme!",
|
||||
"This is an experimental feature, it may not function as expected and is subject to change at any time.": "Jedná se o experimentální funkci, nemusí fungovat podle očekávání a může být kdykoliv změněna.",
|
||||
"This model is not publicly available. Please select another model.": "",
|
||||
"This option controls how long the model will stay loaded into memory following the request (default: 5m)": "",
|
||||
"This option controls how many tokens are preserved when refreshing the context. For example, if set to 2, the last 2 tokens of the conversation context will be retained. Preserving context can help maintain the continuity of a conversation, but it may reduce the ability to respond to new topics.": "",
|
||||
"This option enables or disables the use of the reasoning feature in Ollama, which allows the model to think before generating a response. When enabled, the model can take a moment to process the conversation context and generate a more thoughtful response.": "",
|
||||
"This option sets the maximum number of tokens the model can generate in its response. Increasing this limit allows the model to provide longer answers, but it may also increase the likelihood of unhelpful or irrelevant content being generated.": "",
|
||||
"This option will delete all existing files in the collection and replace them with newly uploaded files.": "Tato volba odstraní všechny existující soubory ve sbírce a nahradí je nově nahranými soubory.",
|
||||
"This response was generated by \"{{model}}\"": "Tato odpověď byla vygenerována pomocí \"{{model}}\"",
|
||||
|
|
|
|||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue