From 17f0bef2e204072c07daacc5c63f7c2d6e357312 Mon Sep 17 00:00:00 2001
From: Sihyeon Jang
Date: Tue, 15 Jul 2025 10:11:22 +0900
Subject: [PATCH 1/2] refactor: use cleanup_response on openai

Signed-off-by: Sihyeon Jang
---
 backend/open_webui/routers/openai.py | 18 ++++++------------
 1 file changed, 6 insertions(+), 12 deletions(-)

diff --git a/backend/open_webui/routers/openai.py b/backend/open_webui/routers/openai.py
index a759ec7eee..68ae460ebb 100644
--- a/backend/open_webui/routers/openai.py
+++ b/backend/open_webui/routers/openai.py
@@ -893,10 +893,8 @@ async def generate_chat_completion(
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)
 
 
 async def embeddings(request: Request, form_data: dict, user):
@@ -975,10 +973,8 @@
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)
 
 
 @router.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
@@ -1074,7 +1070,5 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)

From 058369adeaf6873801d89c6a30ca16da2a320e06 Mon Sep 17 00:00:00 2001
From: Sihyeon Jang
Date: Tue, 15 Jul 2025 10:14:30 +0900
Subject: [PATCH 2/2] fix: improve cleanup_response positioning for better resource management

Signed-off-by: Sihyeon Jang
---
 backend/open_webui/routers/ollama.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/backend/open_webui/routers/ollama.py b/backend/open_webui/routers/ollama.py
index 000f4b48d2..7fa7aef249 100644
--- a/backend/open_webui/routers/ollama.py
+++ b/backend/open_webui/routers/ollama.py
@@ -184,7 +184,6 @@ async def send_post_request(
             )
         else:
             res = await r.json()
-            await cleanup_response(r, session)
             return res
 
     except HTTPException as e:
@@ -196,6 +195,9 @@ async def send_post_request(
             status_code=r.status if r else 500,
             detail=detail if e else "Open WebUI: Server Connection Error",
         )
+    finally:
+        if not stream:
+            await cleanup_response(r, session)
 
 
 def get_api_key(idx, url, configs):
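
Both patches converge on the same pattern: release the upstream aiohttp response and session in a finally block via cleanup_response(r, session), but only when the request is not streaming, since a streamed response must stay open and is cleaned up later by a background task. The sketch below illustrates that pattern in isolation. It is not code from the repository: cleanup_response here simply mirrors the inline cleanup that PATCH 1/2 removes, and post_and_relay is a hypothetical stand-in for the routers' request handlers.

import aiohttp
from fastapi.responses import StreamingResponse
from starlette.background import BackgroundTask


async def cleanup_response(response, session):
    # Mirrors the inline cleanup removed in PATCH 1/2: close the HTTP
    # response first (if any), then close the owning client session.
    if response:
        response.close()
    if session:
        await session.close()


async def post_and_relay(url: str, payload: bytes, stream: bool):
    # Hypothetical handler showing the finally-based cleanup pattern.
    r = None
    session = None
    streaming = False
    try:
        session = aiohttp.ClientSession()
        r = await session.post(url, data=payload)
        r.raise_for_status()

        if stream:
            # Streaming: the response outlives this function, so cleanup
            # is deferred to a background task, not the finally block.
            streaming = True
            return StreamingResponse(
                r.content,
                status_code=r.status,
                background=BackgroundTask(cleanup_response, r, session),
            )
        return await r.json()
    finally:
        # Non-streaming (including error paths): release both resources here.
        if not streaming:
            await cleanup_response(r, session)

Moving the call into a finally block, as PATCH 2/2 does for send_post_request, also covers the error paths: previously the non-streaming cleanup only ran on the successful JSON branch, so a failed request left the session open.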