From b7a91b1963c6ad4fb9068ca0ed4d81be467ea026 Mon Sep 17 00:00:00 2001
From: Timothy Jaeryang Baek
Date: Tue, 10 Jun 2025 13:10:31 +0400
Subject: [PATCH] refac: ollama response

---
 backend/open_webui/utils/middleware.py | 8 +++++---
 backend/open_webui/utils/misc.py       | 8 +++++++-
 backend/open_webui/utils/response.py   | 3 ++-
 3 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/backend/open_webui/utils/middleware.py b/backend/open_webui/utils/middleware.py
index 52061f5b95..0106779a87 100644
--- a/backend/open_webui/utils/middleware.py
+++ b/backend/open_webui/utils/middleware.py
@@ -1866,9 +1866,11 @@ async def process_chat_response(
 
                                 value = delta.get("content")
 
-                                reasoning_content = delta.get(
-                                    "reasoning_content"
-                                ) or delta.get("reasoning")
+                                reasoning_content = (
+                                    delta.get("reasoning_content")
+                                    or delta.get("reasoning")
+                                    or delta.get("thinking")
+                                )
                                 if reasoning_content:
                                     if (
                                         not content_blocks
diff --git a/backend/open_webui/utils/misc.py b/backend/open_webui/utils/misc.py
index ffc8c93ca4..710195eac0 100644
--- a/backend/open_webui/utils/misc.py
+++ b/backend/open_webui/utils/misc.py
@@ -208,6 +208,7 @@ def openai_chat_message_template(model: str):
 def openai_chat_chunk_message_template(
     model: str,
     content: Optional[str] = None,
+    reasoning_content: Optional[str] = None,
     tool_calls: Optional[list[dict]] = None,
     usage: Optional[dict] = None,
 ) -> dict:
@@ -220,6 +221,9 @@ def openai_chat_chunk_message_template(
     if content:
         template["choices"][0]["delta"]["content"] = content
 
+    if reasoning_content:
+        template["choices"][0]["delta"]["reasoning_content"] = reasoning_content
+
     if tool_calls:
         template["choices"][0]["delta"]["tool_calls"] = tool_calls
 
@@ -234,6 +238,7 @@ def openai_chat_chunk_message_template(
 def openai_chat_completion_message_template(
     model: str,
     message: Optional[str] = None,
+    reasoning_content: Optional[str] = None,
     tool_calls: Optional[list[dict]] = None,
     usage: Optional[dict] = None,
 ) -> dict:
@@ -241,8 +246,9 @@ def openai_chat_completion_message_template(
     template["object"] = "chat.completion"
     if message is not None:
         template["choices"][0]["message"] = {
-            "content": message,
             "role": "assistant",
+            "content": message,
+            **({"reasoning_content": reasoning_content} if reasoning_content else {}),
             **({"tool_calls": tool_calls} if tool_calls else {}),
         }
 
diff --git a/backend/open_webui/utils/response.py b/backend/open_webui/utils/response.py
index f71087e4ff..662151d3a3 100644
--- a/backend/open_webui/utils/response.py
+++ b/backend/open_webui/utils/response.py
@@ -105,6 +105,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
 
         model = data.get("model", "ollama")
         message_content = data.get("message", {}).get("content", None)
+        reasoning_content = data.get("message", {}).get("thinking", None)
         tool_calls = data.get("message", {}).get("tool_calls", None)
         openai_tool_calls = None
 
@@ -118,7 +119,7 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
             usage = convert_ollama_usage_to_openai(data)
 
         data = openai_chat_chunk_message_template(
-            model, message_content, openai_tool_calls, usage
+            model, message_content, reasoning_content, openai_tool_calls, usage
        )
 
         line = f"data: {json.dumps(data)}\n\n"
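-- 
A minimal standalone sketch of the mapping this patch wires up, for reference:
an Ollama stream chunk whose message carries a "thinking" field is converted
into an OpenAI-style chunk whose delta carries "reasoning_content" (the
middleware hunk above also accepts "reasoning" and "thinking" directly in
OpenAI-style deltas). The helper build_openai_chunk below is a hypothetical,
simplified stand-in for openai_chat_chunk_message_template, and the sample
chunk is invented for illustration; only the field names come from the diff.

import json
import time
import uuid
from typing import Optional


def build_openai_chunk(
    model: str,
    content: Optional[str] = None,
    reasoning_content: Optional[str] = None,
) -> dict:
    # Hypothetical stand-in for openai_chat_chunk_message_template: builds an
    # OpenAI-style streaming chunk, adding delta fields only when present.
    delta: dict = {}
    if content:
        delta["content"] = content
    if reasoning_content:
        delta["reasoning_content"] = reasoning_content
    return {
        "id": f"chatcmpl-{uuid.uuid4()}",
        "object": "chat.completion.chunk",
        "created": int(time.time()),
        "model": model,
        "choices": [{"index": 0, "delta": delta, "finish_reason": None}],
    }


# Invented example of an Ollama chunk as a reasoning model might emit it.
ollama_chunk = {
    "model": "deepseek-r1:8b",
    "message": {"role": "assistant", "content": "", "thinking": "Hmm, first I"},
    "done": False,
}

# Mirrors convert_streaming_response_ollama_to_openai: "thinking" on the
# Ollama side becomes "reasoning_content" in the OpenAI-style delta.
message = ollama_chunk.get("message", {})
openai_chunk = build_openai_chunk(
    ollama_chunk.get("model", "ollama"),
    message.get("content", None),
    message.get("thinking", None),
)
print(f"data: {json.dumps(openai_chunk)}")
# The printed delta now includes {"reasoning_content": "Hmm, first I"}.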