diff --git a/backend/open_webui/routers/ollama.py b/backend/open_webui/routers/ollama.py
index 000f4b48d2..7fa7aef249 100644
--- a/backend/open_webui/routers/ollama.py
+++ b/backend/open_webui/routers/ollama.py
@@ -184,7 +184,6 @@ async def send_post_request(
             )
         else:
             res = await r.json()
-            await cleanup_response(r, session)
             return res
 
     except HTTPException as e:
@@ -196,6 +195,9 @@
             status_code=r.status if r else 500,
             detail=detail if e else "Open WebUI: Server Connection Error",
         )
+    finally:
+        if not stream:
+            await cleanup_response(r, session)
 
 
 def get_api_key(idx, url, configs):
diff --git a/backend/open_webui/routers/openai.py b/backend/open_webui/routers/openai.py
index a759ec7eee..68ae460ebb 100644
--- a/backend/open_webui/routers/openai.py
+++ b/backend/open_webui/routers/openai.py
@@ -893,10 +893,8 @@ async def generate_chat_completion(
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)
 
 
 async def embeddings(request: Request, form_data: dict, user):
@@ -975,10 +973,8 @@
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)
 
 
 @router.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
@@ -1074,7 +1070,5 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
             detail=detail if detail else "Open WebUI: Server Connection Error",
         )
     finally:
-        if not streaming and session:
-            if r:
-                r.close()
-            await session.close()
+        if not streaming:
+            await cleanup_response(r, session)