fix: openai response propagation issue

Timothy Jaeryang Baek 2025-08-11 00:37:06 +04:00
parent b581536a66
commit 059cc636f6


@@ -19,7 +19,7 @@ from concurrent.futures import ThreadPoolExecutor
 
 from fastapi import Request, HTTPException
-from starlette.responses import Response, StreamingResponse
+from starlette.responses import Response, StreamingResponse, JSONResponse
 
 
 from open_webui.models.chats import Chats
 
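Why the new import matters: Starlette response objects are not dict-like, so the membership test this handler previously ran against a JSONResponse raises instead of returning False. A minimal repro against stock Starlette, with a hypothetical payload:

import json
from starlette.responses import JSONResponse

response = JSONResponse(content={"error": {"detail": "upstream failure"}})

# Dict-style membership fails on a Response object:
#   "error" in response  ->  TypeError: argument of type 'JSONResponse' is not iterable

# Starlette serializes content at construction time; the payload survives
# as bytes on .body and can be recovered with json.loads:
payload = json.loads(response.body)
assert payload["error"]["detail"] == "upstream failure"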
@@ -1254,8 +1254,13 @@ async def process_chat_response(
         # Non-streaming response
         if not isinstance(response, StreamingResponse):
             if event_emitter:
-                if "error" in response:
-                    error = response["error"].get("detail", response["error"])
+                if isinstance(response, dict) or isinstance(response, JSONResponse):
+                    response_data = (
+                        response if isinstance(response, dict) else response.content
+                    )
+
+                if "error" in response_data:
+                    error = response_data["error"].get("detail", response_data["error"])
                     Chats.upsert_message_to_chat_by_id_and_message_id(
                         metadata["chat_id"],
                         metadata["message_id"],
@@ -1264,25 +1269,24 @@ async def process_chat_response(
                         },
                     )
 
-                if "selected_model_id" in response:
+                if "selected_model_id" in response_data:
                     Chats.upsert_message_to_chat_by_id_and_message_id(
                         metadata["chat_id"],
                         metadata["message_id"],
                         {
-                            "selectedModelId": response["selected_model_id"],
+                            "selectedModelId": response_data["selected_model_id"],
                         },
                     )
 
-                choices = response.get("choices", [])
+                choices = response_data.get("choices", [])
                 if choices and choices[0].get("message", {}).get("content"):
-                    content = response["choices"][0]["message"]["content"]
+                    content = response_data["choices"][0]["message"]["content"]
 
                     if content:
                         await event_emitter(
                             {
                                 "type": "chat:completion",
-                                "data": response,
+                                "data": response_data,
                             }
                         )
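A caveat on the normalization above: stock Starlette responses keep only the serialized bytes on .body after construction and expose no parsed .content attribute, so the response.content branch assumes the payload was stashed on the instance elsewhere. A defensive sketch of the same unwrapping step, decoding the body explicitly (the helper name is illustrative, not from this commit):

import json
from typing import Any, Union

from starlette.responses import JSONResponse


def unwrap_response(response: Union[dict, JSONResponse]) -> dict[str, Any]:
    """Return the payload dict whether we were handed a dict or a JSONResponse."""
    if isinstance(response, dict):
        return response
    # Decode the body bytes Starlette produced when the response was built.
    return json.loads(response.body)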
@@ -1327,7 +1331,7 @@ async def process_chat_response(
 
                     await background_tasks_handler()
 
-            if events and isinstance(events, list) and isinstance(response, dict):
+            if events and isinstance(events, list):
                 extra_response = {}
                 for event in events:
                     if isinstance(event, dict):
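The dict branch of this merge loop sits between the hunks shown, so its exact body is not visible here; assuming it folds the event's keys into the accumulator, the intended behavior is roughly this (event shapes are hypothetical):

# Hypothetical events gathered during a completion.
events = [{"selected_model_id": "gpt-4o-mini"}, "task_finished"]

extra_response = {}
for event in events:
    if isinstance(event, dict):
        extra_response.update(event)  # assumed; the real merge line falls outside the hunk
    else:
        extra_response[event] = True

# extra_response == {"selected_model_id": "gpt-4o-mini", "task_finished": True}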
@@ -1335,11 +1339,20 @@ async def process_chat_response(
                     else:
                         extra_response[event] = True
 
-                response = {
+                response_data = {
                     **extra_response,
-                    **response,
+                    **response_data,
                 }
 
+                if isinstance(response, JSONResponse):
+                    response = JSONResponse(
+                        content=response_data,
+                        headers=response.headers,
+                        status_code=response.status_code,
+                    )
+                else:
+                    response = response_data
+
             return response
         else:
             if events and isinstance(events, list) and isinstance(response, dict):
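One more note on the rebuild step in this hunk: passing headers=response.headers carries the original Content-Length into the new JSONResponse, and Starlette does not recompute that header when the caller supplies one, so a payload that grows after merging events can be truncated by strict clients. A hedged sketch of a rebuild that avoids this (the helper is illustrative, not part of the commit):

import json
from starlette.responses import JSONResponse


def rebuild_json_response(response: JSONResponse, extra: dict) -> JSONResponse:
    """Merge extra fields into a JSONResponse while keeping status and headers."""
    merged = {**extra, **json.loads(response.body)}
    # Drop content-length so Starlette recomputes it for the merged payload;
    # a stale value passed via headers= would otherwise be kept verbatim.
    headers = {
        k: v for k, v in response.headers.items() if k.lower() != "content-length"
    }
    return JSONResponse(content=merged, status_code=response.status_code, headers=headers)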