diff --git a/CHANGELOG.md b/CHANGELOG.md index 76939def1a..d1da28a11e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,27 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.3.20] - 2024-09-07 + +### Added + +- **🌐 Translation Update**: Updated Catalan translations to improve the experience for Catalan speakers. + +### Fixed + +- **📄 PDF Download**: Resolved a configuration issue with the fonts directory, ensuring PDFs now download with the correct formatting. +- **🛠️ Installation of Tools & Functions Requirements**: Fixed a bug where the requirements declared by tools and functions were not being installed properly. +- **🔗 Inline Image Link Rendering**: Enabled rendering of images directly from links in chat. +- **📞 Post-Call User Interface Cleanup**: Adjusted UI behavior to automatically close chat controls after a voice call ends, reducing screen clutter. +- **🎙️ Microphone Deactivation Post-Call**: Addressed an issue where the microphone remained active after calls. +- **✍️ Markdown Spacing Correction**: Corrected spacing in Markdown rendering so text appears neatly and as expected. +- **🔄 Message Re-rendering**: Fixed an issue causing all response messages to re-render with each new message, improving chat performance. + +### Changed + +- **🌐 Refined Web Search Integration**: Deprecated the search query generation prompt length threshold; introduced an "Enable Web Search Query Generation" toggle, allowing users to opt in to web search query generation. +- **📝 Default Prompt Templates Update**: Empty prompt template environment variables for search and title generation now fall back to the Open WebUI default prompt templates, simplifying configuration.
+ ## [0.3.19] - 2024-09-05 ### Added diff --git a/backend/.gitignore b/backend/.gitignore index ea83b34f43..614a5f7465 100644 --- a/backend/.gitignore +++ b/backend/.gitignore @@ -8,9 +8,5 @@ _test Pipfile !/data /data/* -!/data/litellm -/data/litellm/* -!data/litellm/config.yaml - -!data/config.json +/open_webui/data/* .webui_secret_key \ No newline at end of file diff --git a/backend/open_webui/__init__.py b/backend/open_webui/__init__.py index c6578c214b..5c40500056 100644 --- a/backend/open_webui/__init__.py +++ b/backend/open_webui/__init__.py @@ -10,6 +10,8 @@ app = typer.Typer() KEY_FILE = Path.cwd() / ".webui_secret_key" +os.environ["FROM_INIT_PY"] = "true" + @app.command() def serve( diff --git a/backend/open_webui/apps/ollama/main.py b/backend/open_webui/apps/ollama/main.py index 44b5667d5d..e45ea88971 100644 --- a/backend/open_webui/apps/ollama/main.py +++ b/backend/open_webui/apps/ollama/main.py @@ -28,11 +28,15 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import StreamingResponse from pydantic import BaseModel, ConfigDict from starlette.background import BackgroundTask + + from open_webui.utils.misc import ( + calculate_sha256, +) +from open_webui.utils.payload import ( apply_model_params_to_body_ollama, apply_model_params_to_body_openai, apply_model_system_prompt_to_body, - calculate_sha256, ) from open_webui.utils.utils import get_admin_user, get_verified_user diff --git a/backend/open_webui/apps/openai/main.py b/backend/open_webui/apps/openai/main.py index 53d1f4534c..e8fd81d459 100644 --- a/backend/open_webui/apps/openai/main.py +++ b/backend/open_webui/apps/openai/main.py @@ -26,10 +26,13 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import FileResponse, StreamingResponse from pydantic import BaseModel from starlette.background import BackgroundTask -from open_webui.utils.misc import ( + + +from open_webui.utils.payload import ( apply_model_params_to_body_openai, apply_model_system_prompt_to_body, ) + from open_webui.utils.utils import get_admin_user, get_verified_user log = logging.getLogger(__name__) diff --git a/backend/open_webui/apps/webui/main.py b/backend/open_webui/apps/webui/main.py index 45fe3cad9e..6c6f197ddb 100644 --- a/backend/open_webui/apps/webui/main.py +++ b/backend/open_webui/apps/webui/main.py @@ -51,11 +51,15 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import StreamingResponse from pydantic import BaseModel from open_webui.utils.misc import ( - apply_model_params_to_body_openai, - apply_model_system_prompt_to_body, openai_chat_chunk_message_template, openai_chat_completion_message_template, ) +from open_webui.utils.payload import ( + apply_model_params_to_body_openai, + apply_model_system_prompt_to_body, +) + + from open_webui.utils.tools import get_tools app = FastAPI() diff --git a/backend/open_webui/apps/webui/routers/utils.py b/backend/open_webui/apps/webui/routers/utils.py index 731f987843..82c294bd7b 100644 --- a/backend/open_webui/apps/webui/routers/utils.py +++ b/backend/open_webui/apps/webui/routers/utils.py @@ -4,6 +4,7 @@ from pathlib import Path import black import markdown from open_webui.config import DATA_DIR, ENABLE_ADMIN_EXPORT +from open_webui.env import FONTS_DIR from open_webui.constants import ERROR_MESSAGES from fastapi import APIRouter, Depends, HTTPException, Response, status from fpdf import FPDF @@ -57,14 +58,11 @@ class ChatForm(BaseModel): async def download_chat_as_pdf( form_data: ChatForm, ): + global FONTS_DIR + pdf = FPDF() 
pdf.add_page() - # When running in docker, workdir is /app/backend, so fonts is in /app/backend/static/fonts - FONTS_DIR = Path("./static/fonts") - - # Non Docker Installation - # When running using `pip install` the static directory is in the site packages. if not FONTS_DIR.exists(): FONTS_DIR = Path(site.getsitepackages()[0]) / "static/fonts" diff --git a/backend/open_webui/apps/webui/utils.py b/backend/open_webui/apps/webui/utils.py index 2b017bf436..2d537af519 100644 --- a/backend/open_webui/apps/webui/utils.py +++ b/backend/open_webui/apps/webui/utils.py @@ -65,6 +65,7 @@ def replace_imports(content): def load_toolkit_module_by_id(toolkit_id, content=None): + if content is None: tool = Tools.get_tool_by_id(toolkit_id) if not tool: @@ -74,6 +75,10 @@ def load_toolkit_module_by_id(toolkit_id, content=None): content = replace_imports(content) Tools.update_tool_by_id(toolkit_id, {"content": content}) + else: + frontmatter = extract_frontmatter(content) + # Install required packages found within the frontmatter + install_frontmatter_requirements(frontmatter.get("requirements", "")) module_name = f"tool_{toolkit_id}" module = types.ModuleType(module_name) @@ -82,16 +87,9 @@ def load_toolkit_module_by_id(toolkit_id, content=None): try: # Executing the modified content in the created module's namespace exec(content, module.__dict__) - - # Extract frontmatter, assuming content can be treated directly as a string - frontmatter = extract_frontmatter( - content - ) # Ensure this method is adaptable to handle content strings - - # Install required packages found within the frontmatter - install_frontmatter_requirements(frontmatter.get("requirements", "")) - + frontmatter = extract_frontmatter(content) print(f"Loaded module: {module.__name__}") + # Create and return the object if the class 'Tools' is found in the module if hasattr(module, "Tools"): return module.Tools(), frontmatter @@ -112,6 +110,9 @@ def load_function_module_by_id(function_id, content=None): content = replace_imports(content) Functions.update_function_by_id(function_id, {"content": content}) + else: + frontmatter = extract_frontmatter(content) + install_frontmatter_requirements(frontmatter.get("requirements", "")) module_name = f"function_{function_id}" module = types.ModuleType(module_name) @@ -120,15 +121,7 @@ def load_function_module_by_id(function_id, content=None): try: # Execute the modified content in the created module's namespace exec(content, module.__dict__) - - # Extract the frontmatter from the content, simulate file-like behaviour - frontmatter = extract_frontmatter( - content - ) # This function needs to handle string inputs - - # Install necessary requirements specified in frontmatter - install_frontmatter_requirements(frontmatter.get("requirements", "")) - + frontmatter = extract_frontmatter(content) print(f"Loaded module: {module.__name__}") # Create appropriate object based on available class type in the module diff --git a/backend/open_webui/config.py b/backend/open_webui/config.py index 2e3f1b2b8d..5ccb40d479 100644 --- a/backend/open_webui/config.py +++ b/backend/open_webui/config.py @@ -898,53 +898,27 @@ TASK_MODEL_EXTERNAL = PersistentConfig( TITLE_GENERATION_PROMPT_TEMPLATE = PersistentConfig( "TITLE_GENERATION_PROMPT_TEMPLATE", "task.title.prompt_template", - os.environ.get( - "TITLE_GENERATION_PROMPT_TEMPLATE", - """Create a concise, 3-5 word title with an emoji as a title for the prompt in the given language. 
Suitable Emojis for the summary can be used to enhance understanding but avoid quotation marks or special formatting. RESPOND ONLY WITH THE TITLE TEXT. + os.environ.get("TITLE_GENERATION_PROMPT_TEMPLATE", ""), +) -Examples of titles: -📉 Stock Market Trends -🍪 Perfect Chocolate Chip Recipe -Evolution of Music Streaming -Remote Work Productivity Tips -Artificial Intelligence in Healthcare -🎮 Video Game Development Insights - -Prompt: {{prompt:middletruncate:8000}}""", - ), +ENABLE_SEARCH_QUERY = PersistentConfig( + "ENABLE_SEARCH_QUERY", + "task.search.enable", + os.environ.get("ENABLE_SEARCH_QUERY", "True").lower() == "true", ) SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = PersistentConfig( "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE", "task.search.prompt_template", - os.environ.get( - "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE", - """You are tasked with generating web search queries. Give me an appropriate query to answer my question for google search. Answer with only the query. Today is {{CURRENT_DATE}}. - -Question: -{{prompt:end:4000}}""", - ), + os.environ.get("SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE", ""), ) -SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = PersistentConfig( - "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD", - "task.search.prompt_length_threshold", - int( - os.environ.get( - "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD", - 100, - ) - ), -) TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = PersistentConfig( "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE", "task.tools.prompt_template", - os.environ.get( - "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE", - """Available Tools: {{TOOLS}}\nReturn an empty string if no tools match the query. If a function tool matches, construct and return a JSON object in the format {\"name\": \"functionName\", \"parameters\": {\"requiredFunctionParamKey\": \"requiredFunctionParamValue\"}} using the appropriate tool and its parameters. 
Only return the object and limit the response to the JSON object without additional text.""", - ), + os.environ.get("TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE", ""), ) diff --git a/backend/open_webui/env.py b/backend/open_webui/env.py index b0585db909..b716769c20 100644 --- a/backend/open_webui/env.py +++ b/backend/open_webui/env.py @@ -88,21 +88,9 @@ WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png" ENV = os.environ.get("ENV", "dev") -PIP_INSTALL = False -try: - importlib.metadata.version("open-webui") - PIP_INSTALL = True -except importlib.metadata.PackageNotFoundError: - pass +FROM_INIT_PY = os.environ.get("FROM_INIT_PY", "False").lower() == "true" - -PIP_INSTALL = ( - os.environ.get("PIP_INSTALL", "False").lower() == "true" - if os.environ.get("PIP_INSTALL") - else PIP_INSTALL -) - -if PIP_INSTALL: +if FROM_INIT_PY: PACKAGE_DATA = {"version": importlib.metadata.version("open-webui")} else: try: @@ -193,7 +181,7 @@ WEBUI_BUILD_HASH = os.environ.get("WEBUI_BUILD_HASH", "dev-build") DATA_DIR = Path(os.getenv("DATA_DIR", BACKEND_DIR / "data")).resolve() -if PIP_INSTALL: +if FROM_INIT_PY: NEW_DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data")).resolve() NEW_DATA_DIR.mkdir(parents=True, exist_ok=True) @@ -210,9 +198,11 @@ if PIP_INSTALL: DATA_DIR = Path(os.getenv("DATA_DIR", OPEN_WEBUI_DIR / "data")) +FONTS_DIR = Path(os.getenv("FONTS_DIR", OPEN_WEBUI_DIR / "static" / "fonts")) + FRONTEND_BUILD_DIR = Path(os.getenv("FRONTEND_BUILD_DIR", BASE_DIR / "build")).resolve() -if PIP_INSTALL: +if FROM_INIT_PY: FRONTEND_BUILD_DIR = Path( os.getenv("FRONTEND_BUILD_DIR", OPEN_WEBUI_DIR / "frontend") ).resolve() diff --git a/backend/open_webui/main.py b/backend/open_webui/main.py index 12d144342f..872adee2dd 100644 --- a/backend/open_webui/main.py +++ b/backend/open_webui/main.py @@ -63,8 +63,8 @@ from open_webui.config import ( MODEL_FILTER_LIST, OAUTH_MERGE_ACCOUNTS_BY_EMAIL, OAUTH_PROVIDERS, + ENABLE_SEARCH_QUERY, SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE, - SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD, STATIC_DIR, TASK_MODEL, TASK_MODEL_EXTERNAL, @@ -199,9 +199,7 @@ app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE = TITLE_GENERATION_PROMPT_TEMP app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = ( SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE ) -app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = ( - SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD -) +app.state.config.ENABLE_SEARCH_QUERY = ENABLE_SEARCH_QUERY app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = ( TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE ) @@ -397,8 +395,13 @@ async def chat_completion_tools_handler( specs = [tool["spec"] for tool in tools.values()] tools_specs = json.dumps(specs) + if app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE != "": + template = app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE + else: + template = """Available Tools: {{TOOLS}}\nReturn an empty string if no tools match the query. If a function tool matches, construct and return a JSON object in the format {\"name\": \"functionName\", \"parameters\": {\"requiredFunctionParamKey\": \"requiredFunctionParamValue\"}} using the appropriate tool and its parameters. 
Only return the object and limit the response to the JSON object without additional text.""" + tools_function_calling_prompt = tools_function_calling_generation_template( - app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, tools_specs + template, tools_specs ) log.info(f"{tools_function_calling_prompt=}") payload = get_tools_function_calling_payload( @@ -1312,8 +1315,8 @@ async def get_task_config(user=Depends(get_verified_user)): "TASK_MODEL": app.state.config.TASK_MODEL, "TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL, "TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE, + "ENABLE_SEARCH_QUERY": app.state.config.ENABLE_SEARCH_QUERY, "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE, - "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD, "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, } @@ -1323,7 +1326,7 @@ class TaskConfigForm(BaseModel): TASK_MODEL_EXTERNAL: Optional[str] TITLE_GENERATION_PROMPT_TEMPLATE: str SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE: str - SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: int + ENABLE_SEARCH_QUERY: bool TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE: str @@ -1337,9 +1340,7 @@ async def update_task_config(form_data: TaskConfigForm, user=Depends(get_admin_u app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE = ( form_data.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE ) - app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD = ( - form_data.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD - ) + app.state.config.ENABLE_SEARCH_QUERY = form_data.ENABLE_SEARCH_QUERY app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE = ( form_data.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE ) @@ -1349,7 +1350,7 @@ async def update_task_config(form_data: TaskConfigForm, user=Depends(get_admin_u "TASK_MODEL_EXTERNAL": app.state.config.TASK_MODEL_EXTERNAL, "TITLE_GENERATION_PROMPT_TEMPLATE": app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE, "SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE": app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE, - "SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD": app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD, + "ENABLE_SEARCH_QUERY": app.state.config.ENABLE_SEARCH_QUERY, "TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE": app.state.config.TOOLS_FUNCTION_CALLING_PROMPT_TEMPLATE, } @@ -1371,7 +1372,20 @@ async def generate_title(form_data: dict, user=Depends(get_verified_user)): print(model_id) - template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE + if app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE != "": + template = app.state.config.TITLE_GENERATION_PROMPT_TEMPLATE + else: + template = """Create a concise, 3-5 word title with an emoji as a title for the prompt in the given language. Suitable Emojis for the summary can be used to enhance understanding but avoid quotation marks or special formatting. RESPOND ONLY WITH THE TITLE TEXT. 
+ +Examples of titles: +📉 Stock Market Trends +🍪 Perfect Chocolate Chip Recipe +Evolution of Music Streaming +Remote Work Productivity Tips +Artificial Intelligence in Healthcare +🎮 Video Game Development Insights + +Prompt: {{prompt:middletruncate:8000}}""" content = title_generation_template( template, @@ -1416,11 +1430,10 @@ async def generate_title(form_data: dict, user=Depends(get_verified_user)): @app.post("/api/task/query/completions") async def generate_search_query(form_data: dict, user=Depends(get_verified_user)): print("generate_search_query") - - if len(form_data["prompt"]) < app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: + if not app.state.config.ENABLE_SEARCH_QUERY: raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, - detail=f"Skip search query generation for short prompts (< {app.state.config.SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD} characters)", + detail=f"Search query generation is disabled", ) model_id = form_data["model"] @@ -1436,12 +1449,22 @@ async def generate_search_query(form_data: dict, user=Depends(get_verified_user) print(model_id) - template = app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE + if app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE != "": + template = app.state.config.SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE + else: + template = """You are tasked with assessing the need for a web search based on the current question and the context provided by the previous interactions. If the question requires a web search, generate an appropriate query for a Google search and respond with only the query. If the question can be answered without a web search or does not require further information, return an empty string. Today's date is {{CURRENT_DATE}}. + +Interaction History: +{{MESSAGES:END:6}} +Current Question: +{{prompt:end:4000}}""" content = search_query_generation_template( - template, form_data["prompt"], {"name": user.name} + template, form_data["messages"], {"name": user.name} ) + print("content", content) + payload = { "model": model_id, "messages": [{"role": "user", "content": content}], diff --git a/backend/open_webui/migrations/scripts/revision.py b/backend/open_webui/migrations/scripts/revision.py new file mode 100644 index 0000000000..32ebc9e35c --- /dev/null +++ b/backend/open_webui/migrations/scripts/revision.py @@ -0,0 +1,19 @@ +from alembic import command +from alembic.config import Config + +from open_webui.env import OPEN_WEBUI_DIR + +alembic_cfg = Config(OPEN_WEBUI_DIR / "alembic.ini") + +# Set the script location dynamically +migrations_path = OPEN_WEBUI_DIR / "migrations" +alembic_cfg.set_main_option("script_location", str(migrations_path)) + + +def revision(message: str) -> None: + command.revision(alembic_cfg, message=message, autogenerate=False) + + +if __name__ == "__main__": + input_message = input("Enter the revision message: ") + revision(input_message) diff --git a/backend/open_webui/migrations/versions/7e5b5dc7342b_init.py b/backend/open_webui/migrations/versions/7e5b5dc7342b_init.py index 62d0f8588b..607a7b2c91 100644 --- a/backend/open_webui/migrations/versions/7e5b5dc7342b_init.py +++ b/backend/open_webui/migrations/versions/7e5b5dc7342b_init.py @@ -11,8 +11,8 @@ from typing import Sequence, Union import sqlalchemy as sa from alembic import op - import open_webui.apps.webui.internal.db +from open_webui.apps.webui.internal.db import JSONField from open_webui.migrations.util import get_existing_tables # revision identifiers, used by Alembic. 
@@ -82,9 +82,7 @@ def upgrade() -> None: sa.Column("id", sa.String(), nullable=False), sa.Column("user_id", sa.String(), nullable=True), sa.Column("filename", sa.Text(), nullable=True), - sa.Column( - "meta", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), + sa.Column("meta", JSONField(), nullable=True), sa.Column("created_at", sa.BigInteger(), nullable=True), sa.PrimaryKeyConstraint("id"), ) @@ -97,12 +95,8 @@ def upgrade() -> None: sa.Column("name", sa.Text(), nullable=True), sa.Column("type", sa.Text(), nullable=True), sa.Column("content", sa.Text(), nullable=True), - sa.Column( - "meta", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), - sa.Column( - "valves", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), + sa.Column("meta", JSONField(), nullable=True), + sa.Column("valves", JSONField(), nullable=True), sa.Column("is_active", sa.Boolean(), nullable=True), sa.Column("is_global", sa.Boolean(), nullable=True), sa.Column("updated_at", sa.BigInteger(), nullable=True), @@ -128,12 +122,8 @@ def upgrade() -> None: sa.Column("user_id", sa.Text(), nullable=True), sa.Column("base_model_id", sa.Text(), nullable=True), sa.Column("name", sa.Text(), nullable=True), - sa.Column( - "params", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), - sa.Column( - "meta", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), + sa.Column("params", JSONField(), nullable=True), + sa.Column("meta", JSONField(), nullable=True), sa.Column("updated_at", sa.BigInteger(), nullable=True), sa.Column("created_at", sa.BigInteger(), nullable=True), sa.PrimaryKeyConstraint("id"), @@ -167,15 +157,9 @@ def upgrade() -> None: sa.Column("user_id", sa.String(), nullable=True), sa.Column("name", sa.Text(), nullable=True), sa.Column("content", sa.Text(), nullable=True), - sa.Column( - "specs", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), - sa.Column( - "meta", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), - sa.Column( - "valves", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), + sa.Column("specs", JSONField(), nullable=True), + sa.Column("meta", JSONField(), nullable=True), + sa.Column("valves", JSONField(), nullable=True), sa.Column("updated_at", sa.BigInteger(), nullable=True), sa.Column("created_at", sa.BigInteger(), nullable=True), sa.PrimaryKeyConstraint("id"), @@ -193,12 +177,8 @@ def upgrade() -> None: sa.Column("updated_at", sa.BigInteger(), nullable=True), sa.Column("created_at", sa.BigInteger(), nullable=True), sa.Column("api_key", sa.String(), nullable=True), - sa.Column( - "settings", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), - sa.Column( - "info", open_webui.apps.webui.internal.db.JSONField(), nullable=True - ), + sa.Column("settings", JSONField(), nullable=True), + sa.Column("info", JSONField(), nullable=True), sa.Column("oauth_sub", sa.Text(), nullable=True), sa.PrimaryKeyConstraint("id"), sa.UniqueConstraint("api_key"), diff --git a/backend/open_webui/utils/misc.py b/backend/open_webui/utils/misc.py index 8b72983f17..d1b3400444 100644 --- a/backend/open_webui/utils/misc.py +++ b/backend/open_webui/utils/misc.py @@ -6,7 +6,14 @@ from datetime import timedelta from pathlib import Path from typing import Callable, Optional -from open_webui.utils.task import prompt_template + +def get_messages_content(messages: list[dict]) -> str: + return "\n".join( + [ + f"{message['role'].upper()}: {get_content_from_message(message)}" + for message in messages + ] + ) def 
get_last_user_message_item(messages: list[dict]) -> Optional[dict]: @@ -30,7 +37,6 @@ def get_last_user_message(messages: list[dict]) -> Optional[str]: message = get_last_user_message_item(messages) if message is None: return None - return get_content_from_message(message) @@ -114,88 +120,6 @@ def openai_chat_completion_message_template(model: str, message: str) -> dict: return template -# inplace function: form_data is modified -def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> dict: - system = params.get("system", None) - if not system: - return form_data - - if user: - template_params = { - "user_name": user.name, - "user_location": user.info.get("location") if user.info else None, - } - else: - template_params = {} - system = prompt_template(system, **template_params) - form_data["messages"] = add_or_update_system_message( - system, form_data.get("messages", []) - ) - return form_data - - -# inplace function: form_data is modified -def apply_model_params_to_body( - params: dict, form_data: dict, mappings: dict[str, Callable] -) -> dict: - if not params: - return form_data - - for key, cast_func in mappings.items(): - if (value := params.get(key)) is not None: - form_data[key] = cast_func(value) - - return form_data - - -# inplace function: form_data is modified -def apply_model_params_to_body_openai(params: dict, form_data: dict) -> dict: - mappings = { - "temperature": float, - "top_p": int, - "max_tokens": int, - "frequency_penalty": int, - "seed": lambda x: x, - "stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x], - } - return apply_model_params_to_body(params, form_data, mappings) - - -def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict: - opts = [ - "temperature", - "top_p", - "seed", - "mirostat", - "mirostat_eta", - "mirostat_tau", - "num_ctx", - "num_batch", - "num_keep", - "repeat_last_n", - "tfs_z", - "top_k", - "min_p", - "use_mmap", - "use_mlock", - "num_thread", - "num_gpu", - ] - mappings = {i: lambda x: x for i in opts} - form_data = apply_model_params_to_body(params, form_data, mappings) - - name_differences = { - "max_tokens": "num_predict", - "frequency_penalty": "repeat_penalty", - } - - for key, value in name_differences.items(): - if (param := params.get(key, None)) is not None: - form_data[value] = param - - return form_data - - def get_gravatar_url(email): # Trim leading and trailing whitespace from # an email address and force all characters diff --git a/backend/open_webui/utils/payload.py b/backend/open_webui/utils/payload.py new file mode 100644 index 0000000000..227cca45f2 --- /dev/null +++ b/backend/open_webui/utils/payload.py @@ -0,0 +1,88 @@ +from open_webui.utils.task import prompt_template +from open_webui.utils.misc import ( + add_or_update_system_message, +) + +from typing import Callable, Optional + + +# inplace function: form_data is modified +def apply_model_system_prompt_to_body(params: dict, form_data: dict, user) -> dict: + system = params.get("system", None) + if not system: + return form_data + + if user: + template_params = { + "user_name": user.name, + "user_location": user.info.get("location") if user.info else None, + } + else: + template_params = {} + system = prompt_template(system, **template_params) + form_data["messages"] = add_or_update_system_message( + system, form_data.get("messages", []) + ) + return form_data + + +# inplace function: form_data is modified +def apply_model_params_to_body( + params: dict, form_data: dict, mappings: dict[str, Callable] +) -> 
dict: + if not params: + return form_data + + for key, cast_func in mappings.items(): + if (value := params.get(key)) is not None: + form_data[key] = cast_func(value) + + return form_data + + +# inplace function: form_data is modified +def apply_model_params_to_body_openai(params: dict, form_data: dict) -> dict: + mappings = { + "temperature": float, + "top_p": int, + "max_tokens": int, + "frequency_penalty": int, + "seed": lambda x: x, + "stop": lambda x: [bytes(s, "utf-8").decode("unicode_escape") for s in x], + } + return apply_model_params_to_body(params, form_data, mappings) + + +def apply_model_params_to_body_ollama(params: dict, form_data: dict) -> dict: + opts = [ + "temperature", + "top_p", + "seed", + "mirostat", + "mirostat_eta", + "mirostat_tau", + "num_ctx", + "num_batch", + "num_keep", + "repeat_last_n", + "tfs_z", + "top_k", + "min_p", + "use_mmap", + "use_mlock", + "num_thread", + "num_gpu", + ] + mappings = {i: lambda x: x for i in opts} + form_data = apply_model_params_to_body(params, form_data, mappings) + + name_differences = { + "max_tokens": "num_predict", + "frequency_penalty": "repeat_penalty", + } + + for key, value in name_differences.items(): + if (param := params.get(key, None)) is not None: + form_data[value] = param + + return form_data diff --git a/backend/open_webui/utils/task.py b/backend/open_webui/utils/task.py index cf3d8a10c2..e7cab76cf7 100644 --- a/backend/open_webui/utils/task.py +++ b/backend/open_webui/utils/task.py @@ -4,6 +4,9 @@ from datetime import datetime from typing import Optional +from open_webui.utils.misc import get_last_user_message, get_messages_content + + def prompt_template( template: str, user_name: Optional[str] = None, user_location: Optional[str] = None ) -> str: @@ -37,9 +40,7 @@ def prompt_template( return template -def title_generation_template( - template: str, prompt: str, user: Optional[dict] = None -) -> str: +def replace_prompt_variable(template: str, prompt: str) -> str: def replacement_function(match): full_match = match.group(0) start_length = match.group(1) @@ -66,7 +67,13 @@ def title_generation_template( replacement_function, template, ) + return template + +def title_generation_template( + template: str, prompt: str, user: Optional[dict] = None +) -> str: + template = replace_prompt_variable(template, prompt) template = prompt_template( template, **( @@ -79,36 +86,50 @@ def title_generation_template( return template -def search_query_generation_template( - template: str, prompt: str, user: Optional[dict] = None -) -> str: +def replace_messages_variable(template: str, messages: list[str]) -> str: def replacement_function(match): full_match = match.group(0) start_length = match.group(1) end_length = match.group(2) middle_length = match.group(3) - if full_match == "{{prompt}}": - return prompt + # Process messages based on the number of messages required + if full_match == "{{MESSAGES}}": + return get_messages_content(messages) elif start_length is not None: - return prompt[: int(start_length)] + return get_messages_content(messages[: int(start_length)]) elif end_length is not None: - return prompt[-int(end_length) :] + return get_messages_content(messages[-int(end_length) :]) elif middle_length is not None: - middle_length = int(middle_length) - if len(prompt) <= middle_length: - return prompt - start = prompt[: math.ceil(middle_length / 2)] - end = prompt[-math.floor(middle_length / 2) :] - return f"{start}...{end}" + mid = int(middle_length) + + if len(messages) <= mid: + return get_messages_content(messages) + # 
Handle middle truncation: split to get start and end portions of the messages list + half = mid // 2 + start_msgs = messages[:half] + end_msgs = messages[-half:] if mid % 2 == 0 else messages[-(half + 1) :] + formatted_start = get_messages_content(start_msgs) + formatted_end = get_messages_content(end_msgs) + return f"{formatted_start}\n{formatted_end}" return "" template = re.sub( - r"{{prompt}}|{{prompt:start:(\d+)}}|{{prompt:end:(\d+)}}|{{prompt:middletruncate:(\d+)}}", + r"{{MESSAGES}}|{{MESSAGES:START:(\d+)}}|{{MESSAGES:END:(\d+)}}|{{MESSAGES:MIDDLETRUNCATE:(\d+)}}", replacement_function, template, ) + return template + + +def search_query_generation_template( + template: str, messages: list[dict], user: Optional[dict] = None +) -> str: + prompt = get_last_user_message(messages) + template = replace_prompt_variable(template, prompt) + template = replace_messages_variable(template, messages) + template = prompt_template( template, **( diff --git a/package-lock.json b/package-lock.json index dc39b8a628..68ddca2afc 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "open-webui", - "version": "0.3.19", + "version": "0.3.20", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "open-webui", - "version": "0.3.19", + "version": "0.3.20", "dependencies": { "@codemirror/lang-javascript": "^6.2.2", "@codemirror/lang-python": "^6.1.6", diff --git a/package.json b/package.json index bfa1334f48..05a5cf9d1c 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "open-webui", - "version": "0.3.19", + "version": "0.3.20", "private": true, "scripts": { "dev": "npm run pyodide:fetch && vite dev --host", diff --git a/src/lib/components/admin/Settings/Interface.svelte b/src/lib/components/admin/Settings/Interface.svelte index a5147a3757..295f3d2089 100644 --- a/src/lib/components/admin/Settings/Interface.svelte +++ b/src/lib/components/admin/Settings/Interface.svelte @@ -23,8 +23,8 @@ TASK_MODEL: '', TASK_MODEL_EXTERNAL: '', TITLE_GENERATION_PROMPT_TEMPLATE: '', - SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE: '', - SEARCH_QUERY_PROMPT_LENGTH_THRESHOLD: 0 + ENABLE_SEARCH_QUERY: true, + SEARCH_QUERY_GENERATION_PROMPT_TEMPLATE: '' }; let promptSuggestions = []; @@ -43,7 +43,6 @@ taskConfig = await getTaskConfig(localStorage.token); promptSuggestions = $config?.default_prompt_suggestions; - banners = await getBanners(localStorage.token); }); @@ -119,33 +118,50 @@