From 9e37d6edcc0dff72889763b023a5c4b691e03d64 Mon Sep 17 00:00:00 2001
From: "Timothy J. Baek"
Date: Fri, 22 Mar 2024 00:55:59 -0700
Subject: [PATCH] refac: gguf upload ui

---
 backend/apps/ollama/main.py                   | 37 +++++++++++++++++++
 .../components/chat/Settings/Models.svelte    | 27 +++++++++++++-
 2 files changed, 62 insertions(+), 2 deletions(-)

diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py
index 80be539a89..57b4bbed01 100644
--- a/backend/apps/ollama/main.py
+++ b/backend/apps/ollama/main.py
@@ -16,6 +16,7 @@ from fastapi.concurrency import run_in_threadpool
 from pydantic import BaseModel, ConfigDict
 
 import os
+import copy
 import random
 import requests
 import json
@@ -1082,6 +1083,42 @@ def upload_model(file: UploadFile = File(...), url_idx: Optional[int] = None):
     return StreamingResponse(file_process_stream(), media_type="text/event-stream")
 
 
+# async def upload_model(file: UploadFile = File(), url_idx: Optional[int] = None):
+#     if url_idx == None:
+#         url_idx = 0
+#     url = app.state.OLLAMA_BASE_URLS[url_idx]
+
+#     file_location = os.path.join(UPLOAD_DIR, file.filename)
+#     total_size = file.size
+
+#     async def file_upload_generator(file):
+#         print(file)
+#         try:
+#             async with aiofiles.open(file_location, "wb") as f:
+#                 completed_size = 0
+#                 while True:
+#                     chunk = await file.read(1024*1024)
+#                     if not chunk:
+#                         break
+#                     await f.write(chunk)
+#                     completed_size += len(chunk)
+#                     progress = (completed_size / total_size) * 100
+
+#                     print(progress)
+#                     yield f'data: {json.dumps({"status": "uploading", "percentage": progress, "total": total_size, "completed": completed_size, "done": False})}\n'
+#         except Exception as e:
+#             print(e)
+#             yield f"data: {json.dumps({'status': 'error', 'message': str(e)})}\n"
+#         finally:
+#             await file.close()
+#             print("done")
+#             yield f'data: {json.dumps({"status": "completed", "percentage": 100, "total": total_size, "completed": completed_size, "done": True})}\n'
+
+#     return StreamingResponse(
+#         file_upload_generator(copy.deepcopy(file)), media_type="text/event-stream"
+#     )
+
+
 @app.api_route("/{path:path}", methods=["GET", "POST", "PUT", "DELETE"])
 async def deprecated_proxy(path: str, request: Request, user=Depends(get_current_user)):
     url = app.state.OLLAMA_BASE_URLS[0]

diff --git a/src/lib/components/chat/Settings/Models.svelte b/src/lib/components/chat/Settings/Models.svelte
index 9b319b951f..2fe3c289df 100644
--- a/src/lib/components/chat/Settings/Models.svelte
+++ b/src/lib/components/chat/Settings/Models.svelte
@@ -66,7 +66,9 @@
 	let modelFileUrl = '';
 	let modelFileContent = `TEMPLATE """{{ .System }}\nUSER: {{ .Prompt }}\nASSISTANT: """\nPARAMETER num_ctx 4096\nPARAMETER stop "</s>"\nPARAMETER stop "USER:"\nPARAMETER stop "ASSISTANT:"`;
 	let modelFileDigest = '';
+
 	let uploadProgress = null;
+	let uploadMessage = '';
 
 	let deleteModelTag = '';
 
@@ -186,7 +188,6 @@
 
 	const uploadModelHandler = async () => {
 		modelTransferring = true;
-		uploadProgress = 0;
 
 		let uploaded = false;
 		let fileResponse = null;
@@ -196,6 +197,8 @@
 			const file = modelInputFile ? modelInputFile[0] : null;
 
 			if (file) {
+				uploadMessage = 'Uploading...';
+
 				fileResponse = await uploadModel(localStorage.token, file, selectedOllamaUrlIdx).catch(
 					(error) => {
 						toast.error(error);
@@ -204,6 +207,7 @@
 				);
 			}
 		} else {
+			uploadProgress = 0;
 			fileResponse = await downloadModel(
 				localStorage.token,
 				modelFileUrl,
@@ -232,6 +236,9 @@
 						let data = JSON.parse(line.replace(/^data: /, ''));
 
 						if (data.progress) {
+							if (uploadMessage) {
+								uploadMessage = '';
+							}
 							uploadProgress = data.progress;
 						}
 
@@ -816,7 +823,23 @@
 							>
 
-							{#if uploadProgress !== null}
+							{#if uploadMessage}
+								<div class="mt-2">
+									<div class=" mb-2 text-xs">{$i18n.t('Upload Progress')}</div>
+
+									<div class="w-full rounded-full dark:bg-gray-800">
+										<div
+											class="dark:bg-gray-600 bg-gray-500 text-xs font-medium text-gray-100 text-center p-0.5 leading-none rounded-full"
+											style="width: 100%"
+										>
+											{uploadMessage}
+										</div>
+									</div>
+									<div class="mt-1 text-xs dark:text-gray-500" style="font-size: 0.5rem;">
+										{modelFileDigest}
+									</div>
+								</div>
+							{:else if uploadProgress !== null}
 								<div class="mt-2">
 									<div class=" mb-2 text-xs">{$i18n.t('Upload Progress')}</div>
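
Note on the streaming-upload pattern: the commented-out handler retained in
backend/apps/ollama/main.py documents the flow the upload UI relies on. The
file is written to disk in 1 MiB chunks, and after each chunk the server emits
one `data: {...}` JSON line so the client can render a progress bar, followed
by a final event with `done: true`. Below is a minimal, synchronous sketch of
that server-sent-events pattern; the helper name `save_with_progress` and the
demo at the bottom are illustrative assumptions, not code from this patch.

# Minimal sketch of the SSE upload-progress pattern (illustrative only;
# `save_with_progress` is a hypothetical name, not part of this patch).
import json

CHUNK_SIZE = 1024 * 1024  # 1 MiB, the same chunk size as the handler above


def save_with_progress(src, dest_path, total_size):
    # Copy `src` (a binary file-like object) to `dest_path`, yielding one
    # SSE-formatted JSON event per chunk and a final completion event.
    completed = 0
    with open(dest_path, "wb") as out:
        while True:
            chunk = src.read(CHUNK_SIZE)
            if not chunk:
                break
            out.write(chunk)
            completed += len(chunk)
            event = {
                "status": "uploading",
                "percentage": (completed / total_size) * 100,
                "total": total_size,
                "completed": completed,
                "done": False,
            }
            yield f"data: {json.dumps(event)}\n"
    done = {
        "status": "completed",
        "percentage": 100,
        "total": total_size,
        "completed": completed,
        "done": True,
    }
    yield f"data: {json.dumps(done)}\n"


if __name__ == "__main__":
    # Demo: stream a 3 MiB in-memory blob to a temp file, printing each event.
    import io
    import os
    import tempfile

    size = 3 * 1024 * 1024
    blob = io.BytesIO(b"\0" * size)
    dest = os.path.join(tempfile.gettempdir(), "upload-progress-demo.bin")
    for line in save_with_progress(blob, dest, size):
        print(line, end="")

The Models.svelte side of the patch consumes exactly this kind of stream: it
splits the response on newlines, strips the `data: ` prefix
(`line.replace(/^data: /, '')`), JSON-parses each event, and then updates
`uploadProgress` or clears `uploadMessage` accordingly.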