From 3e8d3b08fa59ac9352f792ab76f19cb5d907e8f1 Mon Sep 17 00:00:00 2001
From: Timothy Jaeryang Baek
Date: Thu, 14 Aug 2025 04:04:20 +0400
Subject: [PATCH] refac

---
 backend/open_webui/utils/middleware.py | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/backend/open_webui/utils/middleware.py b/backend/open_webui/utils/middleware.py
index b459f211b1..15fd08d2dd 100644
--- a/backend/open_webui/utils/middleware.py
+++ b/backend/open_webui/utils/middleware.py
@@ -989,25 +989,24 @@ async def process_chat_payload(request, form_data, user, metadata, model):
         if prompt is None:
             raise Exception("No user message found")
 
-        if context_string == "":
-            if request.app.state.config.RELEVANCE_THRESHOLD == 0:
-                log.debug(
-                    f"With a 0 relevancy threshold for RAG, the context cannot be empty"
-                )
-        else:
+        if context_string != "":
             # Workaround for Ollama 2.0+ system prompt issue
             # TODO: replace with add_or_update_system_message
             if model.get("owned_by") == "ollama":
                 form_data["messages"] = prepend_to_first_user_message_content(
                     rag_template(
-                        request.app.state.config.RAG_TEMPLATE, context_string, prompt
+                        request.app.state.config.RAG_TEMPLATE,
+                        context_string,
+                        prompt,
                     ),
                     form_data["messages"],
                 )
             else:
                 form_data["messages"] = add_or_update_system_message(
                     rag_template(
-                        request.app.state.config.RAG_TEMPLATE, context_string, prompt
+                        request.app.state.config.RAG_TEMPLATE,
+                        context_string,
+                        prompt,
                     ),
                     form_data["messages"],
                 )
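
For context, a minimal, self-contained sketch of the message-injection flow after this change. The removed empty-context branch only emitted a debug log, so the refactor inverts the condition and keeps the two injection paths. The helpers below are simplified stand-ins for the real rag_template, prepend_to_first_user_message_content, and add_or_update_system_message utilities, not their actual implementations; only the branching mirrors the patched code, and the placeholder names in the toy template are illustrative.

# Hypothetical stand-ins; the real helpers live in Open WebUI's utility modules.

def rag_template(template: str, context: str, query: str) -> str:
    # Stand-in: substitute the retrieved context and user query into the
    # configured template (placeholder names here are illustrative only).
    return template.replace("[context]", context).replace("[query]", query)


def prepend_to_first_user_message_content(content: str, messages: list) -> list:
    # Stand-in: prepend the RAG content to the first user message
    # (the workaround path used for Ollama models in the patch).
    for message in messages:
        if message.get("role") == "user":
            message["content"] = f"{content}\n{message['content']}"
            break
    return messages


def add_or_update_system_message(content: str, messages: list) -> list:
    # Stand-in: carry the RAG content in the system message instead.
    if messages and messages[0].get("role") == "system":
        messages[0]["content"] = f"{content}\n{messages[0]['content']}"
    else:
        messages.insert(0, {"role": "system", "content": content})
    return messages


def inject_rag_context(model: dict, messages: list, template: str,
                       context_string: str, prompt: str) -> list:
    # Mirrors the patched branch: only inject when retrieval produced context.
    if context_string != "":
        content = rag_template(template, context_string, prompt)
        if model.get("owned_by") == "ollama":
            return prepend_to_first_user_message_content(content, messages)
        return add_or_update_system_message(content, messages)
    return messages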