fix: openai response propagation issue

Timothy Jaeryang Baek 2025-08-11 00:37:06 +04:00
parent b581536a66
commit 059cc636f6


@@ -19,7 +19,7 @@ from concurrent.futures import ThreadPoolExecutor
 from fastapi import Request, HTTPException
-from starlette.responses import Response, StreamingResponse
+from starlette.responses import Response, StreamingResponse, JSONResponse
 from open_webui.models.chats import Chats
@@ -1254,91 +1254,104 @@ async def process_chat_response(
     # Non-streaming response
     if not isinstance(response, StreamingResponse):
         if event_emitter:
-            if "error" in response:
-                error = response["error"].get("detail", response["error"])
-                Chats.upsert_message_to_chat_by_id_and_message_id(
-                    metadata["chat_id"],
-                    metadata["message_id"],
-                    {
-                        "error": {"content": error},
-                    },
-                )
-
-            if "selected_model_id" in response:
-                Chats.upsert_message_to_chat_by_id_and_message_id(
-                    metadata["chat_id"],
-                    metadata["message_id"],
-                    {
-                        "selectedModelId": response["selected_model_id"],
-                    },
-                )
-
-            choices = response.get("choices", [])
-            if choices and choices[0].get("message", {}).get("content"):
-                content = response["choices"][0]["message"]["content"]
-
-                if content:
-                    await event_emitter(
-                        {
-                            "type": "chat:completion",
-                            "data": response,
-                        }
-                    )
-
-                    title = Chats.get_chat_title_by_id(metadata["chat_id"])
-
-                    await event_emitter(
-                        {
-                            "type": "chat:completion",
-                            "data": {
-                                "done": True,
-                                "content": content,
-                                "title": title,
-                            },
-                        }
-                    )
-
-                    # Save message in the database
-                    Chats.upsert_message_to_chat_by_id_and_message_id(
-                        metadata["chat_id"],
-                        metadata["message_id"],
-                        {
-                            "role": "assistant",
-                            "content": content,
-                        },
-                    )
-
-                    # Send a webhook notification if the user is not active
-                    if not get_active_status_by_user_id(user.id):
-                        webhook_url = Users.get_user_webhook_url_by_id(user.id)
-                        if webhook_url:
-                            post_webhook(
-                                request.app.state.WEBUI_NAME,
-                                webhook_url,
-                                f"{title} - {request.app.state.config.WEBUI_URL}/c/{metadata['chat_id']}\n\n{content}",
-                                {
-                                    "action": "chat",
-                                    "message": content,
-                                    "title": title,
-                                    "url": f"{request.app.state.config.WEBUI_URL}/c/{metadata['chat_id']}",
-                                },
-                            )
-
-                    await background_tasks_handler()
-
-        if events and isinstance(events, list) and isinstance(response, dict):
-            extra_response = {}
-            for event in events:
-                if isinstance(event, dict):
-                    extra_response.update(event)
-                else:
-                    extra_response[event] = True
-
-            response = {
-                **extra_response,
-                **response,
-            }
+            if isinstance(response, dict) or isinstance(response, JSONResponse):
+                response_data = (
+                    response if isinstance(response, dict) else response.content
+                )
+
+                if "error" in response_data:
+                    error = response_data["error"].get("detail", response_data["error"])
+                    Chats.upsert_message_to_chat_by_id_and_message_id(
+                        metadata["chat_id"],
+                        metadata["message_id"],
+                        {
+                            "error": {"content": error},
+                        },
+                    )
+
+                if "selected_model_id" in response_data:
+                    Chats.upsert_message_to_chat_by_id_and_message_id(
+                        metadata["chat_id"],
+                        metadata["message_id"],
+                        {
+                            "selectedModelId": response_data["selected_model_id"],
+                        },
+                    )
+
+                choices = response_data.get("choices", [])
+                if choices and choices[0].get("message", {}).get("content"):
+                    content = response_data["choices"][0]["message"]["content"]
+
+                    if content:
+                        await event_emitter(
+                            {
+                                "type": "chat:completion",
+                                "data": response_data,
+                            }
+                        )
+
+                        title = Chats.get_chat_title_by_id(metadata["chat_id"])
+
+                        await event_emitter(
+                            {
+                                "type": "chat:completion",
+                                "data": {
+                                    "done": True,
+                                    "content": content,
+                                    "title": title,
+                                },
+                            }
+                        )
+
+                        # Save message in the database
+                        Chats.upsert_message_to_chat_by_id_and_message_id(
+                            metadata["chat_id"],
+                            metadata["message_id"],
+                            {
+                                "role": "assistant",
+                                "content": content,
+                            },
+                        )
+
+                        # Send a webhook notification if the user is not active
+                        if not get_active_status_by_user_id(user.id):
+                            webhook_url = Users.get_user_webhook_url_by_id(user.id)
+                            if webhook_url:
+                                post_webhook(
+                                    request.app.state.WEBUI_NAME,
+                                    webhook_url,
+                                    f"{title} - {request.app.state.config.WEBUI_URL}/c/{metadata['chat_id']}\n\n{content}",
+                                    {
+                                        "action": "chat",
+                                        "message": content,
+                                        "title": title,
+                                        "url": f"{request.app.state.config.WEBUI_URL}/c/{metadata['chat_id']}",
+                                    },
+                                )
+
+                        await background_tasks_handler()
+
+                if events and isinstance(events, list):
+                    extra_response = {}
+                    for event in events:
+                        if isinstance(event, dict):
+                            extra_response.update(event)
+                        else:
+                            extra_response[event] = True
+
+                    response_data = {
+                        **extra_response,
+                        **response_data,
+                    }
+
+                    if isinstance(response, JSONResponse):
+                        response = JSONResponse(
+                            content=response_data,
+                            headers=response.headers,
+                            status_code=response.status_code,
+                        )
+                    else:
+                        response = response_data
 
         return response
     else:
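
The change above follows an unwrap, merge, re-wrap pattern: pull a plain dict out of whatever the upstream OpenAI handler returned (a dict or a JSONResponse), apply the error/model/event bookkeeping to that dict, then hand the result back in the original shape so the status code and headers still propagate to the client. The sketch below illustrates just the merge-and-rewrap step in isolation; it is not the production process_chat_response path, and it unwraps the payload via json.loads(response.body) (an assumption made so the standalone example runs) rather than the response.content attribute used in the diff.

# Minimal, self-contained sketch of the propagation pattern (assumed names;
# not the production code path).
import json

from starlette.responses import JSONResponse


def merge_events_into_response(response, events):
    """Merge event dicts into a non-streaming completion response.

    `response` may be a plain dict or a starlette JSONResponse; the merged
    payload is returned in the same shape so status code and headers survive.
    """
    # Unwrap: a JSONResponse keeps its rendered payload in `.body` (bytes).
    if isinstance(response, JSONResponse):
        response_data = json.loads(response.body)
    else:
        response_data = response

    # Merge: dict events extend the payload, any other event becomes a flag.
    extra_response = {}
    for event in events or []:
        if isinstance(event, dict):
            extra_response.update(event)
        else:
            extra_response[event] = True
    response_data = {**extra_response, **response_data}

    # Re-wrap in the original shape; drop content-length so starlette
    # recomputes it for the new body.
    if isinstance(response, JSONResponse):
        headers = {
            k: v for k, v in response.headers.items() if k.lower() != "content-length"
        }
        return JSONResponse(
            content=response_data,
            status_code=response.status_code,
            headers=headers,
        )
    return response_data


if __name__ == "__main__":
    wrapped = JSONResponse({"choices": [{"message": {"content": "hi"}}]})
    merged = merge_events_into_response(wrapped, [{"selected_model_id": "gpt-4o"}])
    print(json.loads(merged.body))  # payload now carries both keys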