From 2d71193bc02548c99c1b5c78adeb407ae5dd025c Mon Sep 17 00:00:00 2001
From: Timothy Jaeryang Baek
Date: Tue, 7 Oct 2025 10:59:53 -0500
Subject: [PATCH] refac

---
 backend/open_webui/utils/middleware.py | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/backend/open_webui/utils/middleware.py b/backend/open_webui/utils/middleware.py
index 5cdf62b2d9..bbfdd6a368 100644
--- a/backend/open_webui/utils/middleware.py
+++ b/backend/open_webui/utils/middleware.py
@@ -1176,11 +1176,12 @@ async def process_chat_payload(request, form_data, user, metadata, model):
     files = form_data.pop("files", None)
     prompt = get_last_user_message(form_data["messages"])
 
-    urls = []
-    if prompt and len(prompt or "") < 500 and (not files or len(files) == 0):
-        urls = extract_urls(prompt)
+    # TODO: re-enable URL extraction from prompt
+    # urls = []
+    # if prompt and len(prompt or "") < 500 and (not files or len(files) == 0):
+    #     urls = extract_urls(prompt)
 
-    if files or urls:
+    if files:
         if not files:
             files = []
 
@@ -1194,7 +1195,7 @@ async def process_chat_payload(request, form_data, user, metadata, model):
                 files = [f for f in files if f.get("id", None) != folder_id]
                 files = [*files, *folder.data["files"]]
 
-        files = [*files, *[{"type": "url", "url": url, "name": url} for url in urls]]
+        # files = [*files, *[{"type": "url", "url": url, "name": url} for url in urls]]
 
         # Remove duplicate files based on their content
         files = list({json.dumps(f, sort_keys=True): f for f in files}.values())