diff --git a/.github/workflows/format-backend.yaml b/.github/workflows/format-backend.yaml index 9d767fa5b4..f2ba6e8159 100644 --- a/.github/workflows/format-backend.yaml +++ b/.github/workflows/format-backend.yaml @@ -25,3 +25,5 @@ jobs: pip install yapf - name: Format backend run: bun run format:backend + - name: Check for changes after format + run: git diff --exit-code diff --git a/.github/workflows/format-build-frontend.yaml b/.github/workflows/format-build-frontend.yaml index 65916cf44b..d14aa6aec9 100644 --- a/.github/workflows/format-build-frontend.yaml +++ b/.github/workflows/format-build-frontend.yaml @@ -18,5 +18,8 @@ jobs: run: bun install - name: Format frontend run: bun run format + - name: Check for changes after format + run: git diff --exit-code - name: Build frontend + if: always() run: bun run build diff --git a/CHANGELOG.md b/CHANGELOG.md index da3e289dca..e48f8dc7a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,24 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## [0.1.116] - 2024-03-31 + +### Added + +- **🔄 Enhanced UI**: Model selector now conveniently located in the navbar, enabling seamless switching between multiple models during conversations. +- **🔍 Improved Model Selector**: Directly pull a model from the selector; models now display detailed information for better understanding. +- **💬 Webhook Support**: Now compatible with Google Chat and Microsoft Teams. +- **🌐 Localization**: Korean translation (i18n) now available. +- **🌑 Dark Theme**: OLED dark theme introduced for reduced strain during prolonged usage. +- **🏷️ Tag Autocomplete**: Dropdown feature added for effortless chat tagging. + +### Fixed + +- **🔽 Auto-Scrolling**: Addressed OpenAI auto-scrolling issue. +- **🏷️ Tag Validation**: Implemented tag validation to prevent empty string tags. +- **🚫 Model Whitelisting**: Resolved LiteLLM model whitelisting issue. +- **✅ Spelling**: Corrected various spelling issues for improved readability.
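The two workflow changes at the top of this diff add a "Check for changes after format" step that runs `git diff --exit-code`, so the job fails whenever the formatters modify tracked files, and the frontend build now carries `if: always()` so it still runs even when that check fails. A rough Python sketch of what the new step enforces (illustrative only; the workflows invoke git directly, and this helper is not part of the PR):

```python
import subprocess
import sys


def fail_if_formatting_changed() -> None:
    # `git diff --exit-code` exits with 0 when the working tree is clean and
    # with 1 when the formatters left uncommitted modifications behind.
    result = subprocess.run(["git", "diff", "--exit-code"])
    if result.returncode != 0:
        sys.exit("Formatting changed files; run the formatters locally and commit the result.")


if __name__ == "__main__":
    fail_if_formatting_changed()
```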
+ ## [0.1.115] - 2024-03-24 ### Added diff --git a/backend/apps/audio/main.py b/backend/apps/audio/main.py index 10cc567169..bb3cd05368 100644 --- a/backend/apps/audio/main.py +++ b/backend/apps/audio/main.py @@ -22,7 +22,13 @@ from utils.utils import ( ) from utils.misc import calculate_sha256 -from config import SRC_LOG_LEVELS, CACHE_DIR, UPLOAD_DIR, WHISPER_MODEL, WHISPER_MODEL_DIR +from config import ( + SRC_LOG_LEVELS, + CACHE_DIR, + UPLOAD_DIR, + WHISPER_MODEL, + WHISPER_MODEL_DIR, +) log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["AUDIO"]) diff --git a/backend/apps/ollama/main.py b/backend/apps/ollama/main.py index f8701c43e3..405f34dd27 100644 --- a/backend/apps/ollama/main.py +++ b/backend/apps/ollama/main.py @@ -33,7 +33,13 @@ from constants import ERROR_MESSAGES from utils.utils import decode_token, get_current_user, get_admin_user -from config import SRC_LOG_LEVELS, OLLAMA_BASE_URLS, MODEL_FILTER_ENABLED, MODEL_FILTER_LIST, UPLOAD_DIR +from config import ( + SRC_LOG_LEVELS, + OLLAMA_BASE_URLS, + MODEL_FILTER_ENABLED, + MODEL_FILTER_LIST, + UPLOAD_DIR, +) from utils.misc import calculate_sha256 log = logging.getLogger(__name__) @@ -770,7 +776,11 @@ async def generate_chat_completion( r = None - log.debug("form_data.model_dump_json(exclude_none=True).encode(): {0} ".format(form_data.model_dump_json(exclude_none=True).encode())) + log.debug( + "form_data.model_dump_json(exclude_none=True).encode(): {0} ".format( + form_data.model_dump_json(exclude_none=True).encode() + ) + ) def get_request(): nonlocal form_data diff --git a/backend/apps/rag/main.py b/backend/apps/rag/main.py index 48ca616668..da7bb307db 100644 --- a/backend/apps/rag/main.py +++ b/backend/apps/rag/main.py @@ -21,6 +21,7 @@ from langchain_community.document_loaders import ( TextLoader, PyPDFLoader, CSVLoader, + BSHTMLLoader, Docx2txtLoader, UnstructuredEPubLoader, UnstructuredWordDocumentLoader, @@ -114,6 +115,7 @@ class CollectionNameForm(BaseModel): class StoreWebForm(CollectionNameForm): url: str + @app.get("/") async def get_status(): return { @@ -296,13 +298,18 @@ def store_web(form_data: StoreWebForm, user=Depends(get_current_user)): def store_data_in_vector_db(data, collection_name, overwrite: bool = False) -> bool: + text_splitter = RecursiveCharacterTextSplitter( chunk_size=app.state.CHUNK_SIZE, chunk_overlap=app.state.CHUNK_OVERLAP, add_start_index=True, ) docs = text_splitter.split_documents(data) - return store_docs_in_vector_db(docs, collection_name, overwrite) + + if len(docs) > 0: + return store_docs_in_vector_db(docs, collection_name, overwrite), None + else: + raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT) def store_text_in_vector_db( @@ -318,6 +325,7 @@ def store_text_in_vector_db( def store_docs_in_vector_db(docs, collection_name, overwrite: bool = False) -> bool: + texts = [doc.page_content for doc in docs] metadatas = [doc.metadata for doc in docs] @@ -402,6 +410,8 @@ def get_loader(filename: str, file_content_type: str, file_path: str): loader = UnstructuredRSTLoader(file_path, mode="elements") elif file_ext == "xml": loader = UnstructuredXMLLoader(file_path) + elif file_ext in ["htm", "html"]: + loader = BSHTMLLoader(file_path, open_encoding="unicode_escape") elif file_ext == "md": loader = UnstructuredMarkdownLoader(file_path) elif file_content_type == "application/epub+zip": @@ -452,19 +462,21 @@ def store_doc( loader, known_type = get_loader(file.filename, file.content_type, file_path) data = loader.load() - result = store_data_in_vector_db(data, collection_name) - if 
result: - return { - "status": True, - "collection_name": collection_name, - "filename": filename, - "known_type": known_type, - } - else: + try: + result = store_data_in_vector_db(data, collection_name) + + if result: + return { + "status": True, + "collection_name": collection_name, + "filename": filename, + "known_type": known_type, + } + except Exception as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=ERROR_MESSAGES.DEFAULT(), + detail=e, ) except Exception as e: log.exception(e) @@ -529,38 +541,42 @@ def scan_docs_dir(user=Depends(get_admin_user)): ) data = loader.load() - result = store_data_in_vector_db(data, collection_name) + try: + result = store_data_in_vector_db(data, collection_name) - if result: - sanitized_filename = sanitize_filename(filename) - doc = Documents.get_doc_by_name(sanitized_filename) + if result: + sanitized_filename = sanitize_filename(filename) + doc = Documents.get_doc_by_name(sanitized_filename) - if doc == None: - doc = Documents.insert_new_doc( - user.id, - DocumentForm( - **{ - "name": sanitized_filename, - "title": filename, - "collection_name": collection_name, - "filename": filename, - "content": ( - json.dumps( - { - "tags": list( - map( - lambda name: {"name": name}, - tags, + if doc == None: + doc = Documents.insert_new_doc( + user.id, + DocumentForm( + **{ + "name": sanitized_filename, + "title": filename, + "collection_name": collection_name, + "filename": filename, + "content": ( + json.dumps( + { + "tags": list( + map( + lambda name: {"name": name}, + tags, + ) ) - ) - } - ) - if len(tags) - else "{}" - ), - } - ), - ) + } + ) + if len(tags) + else "{}" + ), + } + ), + ) + except Exception as e: + print(e) + pass except Exception as e: log.exception(e) diff --git a/backend/apps/web/models/auths.py b/backend/apps/web/models/auths.py index b26236ef88..75637700d5 100644 --- a/backend/apps/web/models/auths.py +++ b/backend/apps/web/models/auths.py @@ -11,6 +11,7 @@ from utils.utils import verify_password from apps.web.internal.db import DB from config import SRC_LOG_LEVELS + log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["MODELS"]) diff --git a/backend/apps/web/models/documents.py b/backend/apps/web/models/documents.py index f399e7ae4c..91e721a487 100644 --- a/backend/apps/web/models/documents.py +++ b/backend/apps/web/models/documents.py @@ -13,6 +13,7 @@ from apps.web.internal.db import DB import json from config import SRC_LOG_LEVELS + log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["MODELS"]) diff --git a/backend/apps/web/models/modelfiles.py b/backend/apps/web/models/modelfiles.py index 51eccfb5e1..50439a8089 100644 --- a/backend/apps/web/models/modelfiles.py +++ b/backend/apps/web/models/modelfiles.py @@ -64,8 +64,8 @@ class ModelfilesTable: self.db.create_tables([Modelfile]) def insert_new_modelfile( - self, user_id: str, - form_data: ModelfileForm) -> Optional[ModelfileModel]: + self, user_id: str, form_data: ModelfileForm + ) -> Optional[ModelfileModel]: if "tagName" in form_data.modelfile: modelfile = ModelfileModel( **{ @@ -73,7 +73,8 @@ class ModelfilesTable: "tag_name": form_data.modelfile["tagName"], "modelfile": json.dumps(form_data.modelfile), "timestamp": int(time.time()), - }) + } + ) try: result = Modelfile.create(**modelfile.model_dump()) @@ -87,29 +88,28 @@ class ModelfilesTable: else: return None - def get_modelfile_by_tag_name(self, - tag_name: str) -> Optional[ModelfileModel]: + def get_modelfile_by_tag_name(self, tag_name: str) -> Optional[ModelfileModel]: 
try: modelfile = Modelfile.get(Modelfile.tag_name == tag_name) return ModelfileModel(**model_to_dict(modelfile)) except: return None - def get_modelfiles(self, - skip: int = 0, - limit: int = 50) -> List[ModelfileResponse]: + def get_modelfiles(self, skip: int = 0, limit: int = 50) -> List[ModelfileResponse]: return [ ModelfileResponse( **{ **model_to_dict(modelfile), - "modelfile": - json.loads(modelfile.modelfile), - }) for modelfile in Modelfile.select() + "modelfile": json.loads(modelfile.modelfile), + } + ) + for modelfile in Modelfile.select() # .limit(limit).offset(skip) ] def update_modelfile_by_tag_name( - self, tag_name: str, modelfile: dict) -> Optional[ModelfileModel]: + self, tag_name: str, modelfile: dict + ) -> Optional[ModelfileModel]: try: query = Modelfile.update( modelfile=json.dumps(modelfile), diff --git a/backend/apps/web/models/prompts.py b/backend/apps/web/models/prompts.py index 044a3697ce..e6b663c045 100644 --- a/backend/apps/web/models/prompts.py +++ b/backend/apps/web/models/prompts.py @@ -52,8 +52,9 @@ class PromptsTable: self.db = db self.db.create_tables([Prompt]) - def insert_new_prompt(self, user_id: str, - form_data: PromptForm) -> Optional[PromptModel]: + def insert_new_prompt( + self, user_id: str, form_data: PromptForm + ) -> Optional[PromptModel]: prompt = PromptModel( **{ "user_id": user_id, @@ -61,7 +62,8 @@ class PromptsTable: "title": form_data.title, "content": form_data.content, "timestamp": int(time.time()), - }) + } + ) try: result = Prompt.create(**prompt.model_dump()) @@ -81,13 +83,14 @@ class PromptsTable: def get_prompts(self) -> List[PromptModel]: return [ - PromptModel(**model_to_dict(prompt)) for prompt in Prompt.select() + PromptModel(**model_to_dict(prompt)) + for prompt in Prompt.select() # .limit(limit).offset(skip) ] def update_prompt_by_command( - self, command: str, - form_data: PromptForm) -> Optional[PromptModel]: + self, command: str, form_data: PromptForm + ) -> Optional[PromptModel]: try: query = Prompt.update( title=form_data.title, diff --git a/backend/apps/web/models/tags.py b/backend/apps/web/models/tags.py index 476e6693d9..196551b7b5 100644 --- a/backend/apps/web/models/tags.py +++ b/backend/apps/web/models/tags.py @@ -11,6 +11,7 @@ import logging from apps.web.internal.db import DB from config import SRC_LOG_LEVELS + log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["MODELS"]) diff --git a/backend/apps/web/routers/chats.py b/backend/apps/web/routers/chats.py index d018b31be5..5f8c61b702 100644 --- a/backend/apps/web/routers/chats.py +++ b/backend/apps/web/routers/chats.py @@ -29,6 +29,7 @@ from apps.web.models.tags import ( from constants import ERROR_MESSAGES from config import SRC_LOG_LEVELS + log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["MODELS"]) diff --git a/backend/apps/web/routers/configs.py b/backend/apps/web/routers/configs.py index b293a398eb..0bad55a6a1 100644 --- a/backend/apps/web/routers/configs.py +++ b/backend/apps/web/routers/configs.py @@ -10,7 +10,12 @@ import uuid from apps.web.models.users import Users -from utils.utils import get_password_hash, get_current_user, get_admin_user, create_token +from utils.utils import ( + get_password_hash, + get_current_user, + get_admin_user, + create_token, +) from utils.misc import get_gravatar_url, validate_email_format from constants import ERROR_MESSAGES @@ -43,7 +48,6 @@ async def set_global_default_models( return request.app.state.DEFAULT_MODELS - @router.post("/default/suggestions", response_model=List[PromptSuggestion]) async 
def set_global_default_suggestions( request: Request, diff --git a/backend/apps/web/routers/modelfiles.py b/backend/apps/web/routers/modelfiles.py index 0c5c121663..3cdbf8a74a 100644 --- a/backend/apps/web/routers/modelfiles.py +++ b/backend/apps/web/routers/modelfiles.py @@ -24,9 +24,9 @@ router = APIRouter() @router.get("/", response_model=List[ModelfileResponse]) -async def get_modelfiles(skip: int = 0, - limit: int = 50, - user=Depends(get_current_user)): +async def get_modelfiles( + skip: int = 0, limit: int = 50, user=Depends(get_current_user) +): return Modelfiles.get_modelfiles(skip, limit) @@ -36,17 +36,16 @@ async def get_modelfiles(skip: int = 0, @router.post("/create", response_model=Optional[ModelfileResponse]) -async def create_new_modelfile(form_data: ModelfileForm, - user=Depends(get_admin_user)): +async def create_new_modelfile(form_data: ModelfileForm, user=Depends(get_admin_user)): modelfile = Modelfiles.insert_new_modelfile(user.id, form_data) if modelfile: return ModelfileResponse( **{ **modelfile.model_dump(), - "modelfile": - json.loads(modelfile.modelfile), - }) + "modelfile": json.loads(modelfile.modelfile), + } + ) else: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -60,17 +59,18 @@ async def create_new_modelfile(form_data: ModelfileForm, @router.post("/", response_model=Optional[ModelfileResponse]) -async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm, - user=Depends(get_current_user)): +async def get_modelfile_by_tag_name( + form_data: ModelfileTagNameForm, user=Depends(get_current_user) +): modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name) if modelfile: return ModelfileResponse( **{ **modelfile.model_dump(), - "modelfile": - json.loads(modelfile.modelfile), - }) + "modelfile": json.loads(modelfile.modelfile), + } + ) else: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -84,8 +84,9 @@ async def get_modelfile_by_tag_name(form_data: ModelfileTagNameForm, @router.post("/update", response_model=Optional[ModelfileResponse]) -async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm, - user=Depends(get_admin_user)): +async def update_modelfile_by_tag_name( + form_data: ModelfileUpdateForm, user=Depends(get_admin_user) +): modelfile = Modelfiles.get_modelfile_by_tag_name(form_data.tag_name) if modelfile: updated_modelfile = { @@ -94,14 +95,15 @@ async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm, } modelfile = Modelfiles.update_modelfile_by_tag_name( - form_data.tag_name, updated_modelfile) + form_data.tag_name, updated_modelfile + ) return ModelfileResponse( **{ **modelfile.model_dump(), - "modelfile": - json.loads(modelfile.modelfile), - }) + "modelfile": json.loads(modelfile.modelfile), + } + ) else: raise HTTPException( status_code=status.HTTP_401_UNAUTHORIZED, @@ -115,7 +117,8 @@ async def update_modelfile_by_tag_name(form_data: ModelfileUpdateForm, @router.delete("/delete", response_model=bool) -async def delete_modelfile_by_tag_name(form_data: ModelfileTagNameForm, - user=Depends(get_admin_user)): +async def delete_modelfile_by_tag_name( + form_data: ModelfileTagNameForm, user=Depends(get_admin_user) +): result = Modelfiles.delete_modelfile_by_tag_name(form_data.tag_name) return result diff --git a/backend/apps/web/routers/users.py b/backend/apps/web/routers/users.py index 67c136daaa..6f1020ec74 100644 --- a/backend/apps/web/routers/users.py +++ b/backend/apps/web/routers/users.py @@ -16,6 +16,7 @@ from utils.utils import get_current_user, get_password_hash, 
get_admin_user from constants import ERROR_MESSAGES from config import SRC_LOG_LEVELS + log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["MODELS"]) diff --git a/backend/config.py b/backend/config.py index ec580c2ec4..ff73baf751 100644 --- a/backend/config.py +++ b/backend/config.py @@ -26,6 +26,7 @@ except ImportError: log.warning("dotenv not installed, skipping...") WEBUI_NAME = "Open WebUI" +WEBUI_FAVICON_URL = "https://openwebui.com/favicon.png" shutil.copyfile("../build/favicon.png", "./static/favicon.png") #################################### @@ -116,7 +117,18 @@ else: log = logging.getLogger(__name__) log.info(f"GLOBAL_LOG_LEVEL: {GLOBAL_LOG_LEVEL}") -log_sources = ["AUDIO", "CONFIG", "DB", "IMAGES", "LITELLM", "MAIN", "MODELS", "OLLAMA", "OPENAI", "RAG"] +log_sources = [ + "AUDIO", + "CONFIG", + "DB", + "IMAGES", + "LITELLM", + "MAIN", + "MODELS", + "OLLAMA", + "OPENAI", + "RAG", +] SRC_LOG_LEVELS = {} @@ -141,7 +153,7 @@ if CUSTOM_NAME: data = r.json() if r.ok: if "logo" in data: - url = ( + WEBUI_FAVICON_URL = url = ( f"https://api.openwebui.com{data['logo']}" if data["logo"][0] == "/" else data["logo"] @@ -238,7 +250,7 @@ OLLAMA_API_BASE_URL = os.environ.get( ) OLLAMA_BASE_URL = os.environ.get("OLLAMA_BASE_URL", "") - +K8S_FLAG = os.environ.get("K8S_FLAG", "") if OLLAMA_BASE_URL == "" and OLLAMA_API_BASE_URL != "": OLLAMA_BASE_URL = ( @@ -251,6 +263,9 @@ if ENV == "prod": if OLLAMA_BASE_URL == "/ollama": OLLAMA_BASE_URL = "http://host.docker.internal:11434" + elif K8S_FLAG: + OLLAMA_BASE_URL = "http://ollama-service.open-webui.svc.cluster.local:11434" + OLLAMA_BASE_URLS = os.environ.get("OLLAMA_BASE_URLS", "") OLLAMA_BASE_URLS = OLLAMA_BASE_URLS if OLLAMA_BASE_URLS != "" else OLLAMA_BASE_URL diff --git a/backend/constants.py b/backend/constants.py index 42c5c85eb7..8bcdd07898 100644 --- a/backend/constants.py +++ b/backend/constants.py @@ -60,3 +60,5 @@ class ERROR_MESSAGES(str, Enum): MODEL_NOT_FOUND = lambda name="": f"Model '{name}' was not found" OPENAI_NOT_FOUND = lambda name="": f"OpenAI API was not found" OLLAMA_NOT_FOUND = "WebUI could not connect to Ollama" + + EMPTY_CONTENT = "The content provided is empty. Please ensure that there is text or data present before proceeding." 
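The new `EMPTY_CONTENT` message pairs with the `store_data_in_vector_db` change earlier in this diff: when the text splitter produces zero chunks, the function now raises instead of passing an empty list to the vector store, and `store_doc` surfaces that error as the HTTP 500 detail. A condensed, dependency-free sketch of that guard (the helper names below are placeholders, not the app's real splitter or Chroma client):

```python
EMPTY_CONTENT = (
    "The content provided is empty. Please ensure that there is text or data "
    "present before proceeding."
)


def split_into_chunks(data: list[str], chunk_size: int = 1500) -> list[str]:
    # Stand-in for RecursiveCharacterTextSplitter.split_documents(): an empty
    # document yields no chunks at all.
    return [
        text[i : i + chunk_size]
        for text in data
        for i in range(0, len(text), chunk_size)
    ]


def save_chunks(chunks: list[str], collection_name: str) -> bool:
    # Stand-in for store_docs_in_vector_db(); pretend the write succeeded.
    return True


def store_data_in_vector_db(data: list[str], collection_name: str) -> bool:
    chunks = split_into_chunks(data)
    if len(chunks) > 0:
        return save_chunks(chunks, collection_name)
    # With no chunks the function now raises, and the caller returns this
    # message as the error detail instead of reporting a silent success.
    raise ValueError(EMPTY_CONTENT)


print(store_data_in_vector_db(["some extracted text"], "demo-collection"))  # True
```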
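The config.py hunk in this same stretch of the diff introduces `K8S_FLAG`: in prod, when no explicit Ollama URL is configured, the default switches from the Docker host gateway to the in-cluster `ollama-service` address. A sketch of the resolution order those lines imply, folded into one standalone function for illustration (the real module assigns globals at import time, and the `/api`-suffix handling approximates context elided from the hunk):

```python
import os


def resolve_ollama_base_url(env: str = "prod") -> str:
    # Precedence: explicit OLLAMA_BASE_URL, then the legacy OLLAMA_API_BASE_URL
    # with its "/api" suffix stripped, then environment-specific defaults.
    base_url = os.environ.get("OLLAMA_BASE_URL", "")
    api_base_url = os.environ.get("OLLAMA_API_BASE_URL", "")
    k8s_flag = os.environ.get("K8S_FLAG", "")

    if base_url == "" and api_base_url != "":
        base_url = (
            api_base_url[: -len("/api")]
            if api_base_url.endswith("/api")
            else api_base_url
        )

    if env == "prod":
        if base_url == "/ollama":
            # Docker image default: reach Ollama on the host machine.
            base_url = "http://host.docker.internal:11434"
        elif k8s_flag:
            # New Kubernetes default: reach the in-cluster Ollama service.
            base_url = "http://ollama-service.open-webui.svc.cluster.local:11434"

    return base_url


print(resolve_ollama_base_url())
```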
diff --git a/backend/data/config.json b/backend/data/config.json index cd6687d798..91259c3f17 100644 --- a/backend/data/config.json +++ b/backend/data/config.json @@ -1,6 +1,7 @@ { "version": 0, "ui": { + "default_locale": "en-US", "prompt_suggestions": [ { "title": [ diff --git a/backend/main.py b/backend/main.py index d4b67079cb..fa35e55a9b 100644 --- a/backend/main.py +++ b/backend/main.py @@ -32,6 +32,7 @@ from utils.utils import get_admin_user from apps.rag.utils import rag_messages from config import ( + CONFIG_DATA, WEBUI_NAME, ENV, VERSION, @@ -49,6 +50,7 @@ logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL) log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["MAIN"]) + class SPAStaticFiles(StaticFiles): async def get_response(self, path: str, scope): try: @@ -88,7 +90,6 @@ class RAGMiddleware(BaseHTTPMiddleware): # Example: Add a new key-value pair or modify existing ones # data["modified"] = True # Example modification if "docs" in data: - data = {**data} data["messages"] = rag_messages( data["docs"], @@ -163,11 +164,15 @@ app.mount("/rag/api/v1", rag_app) @app.get("/api/config") async def get_app_config(): - return { "status": True, "name": WEBUI_NAME, "version": VERSION, + "default_locale": ( + CONFIG_DATA["ui"]["default_locale"] + if "default_locale" in CONFIG_DATA["ui"] + else "en-US" + ), "images": images_app.state.ENABLED, "default_models": webui_app.state.DEFAULT_MODELS, "default_prompt_suggestions": webui_app.state.DEFAULT_PROMPT_SUGGESTIONS, @@ -191,7 +196,6 @@ class ModelFilterConfigForm(BaseModel): async def update_model_filter_config( form_data: ModelFilterConfigForm, user=Depends(get_admin_user) ): - app.state.MODEL_FILTER_ENABLED = form_data.enabled app.state.MODEL_FILTER_LIST = form_data.models @@ -201,6 +205,9 @@ async def update_model_filter_config( openai_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED openai_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST + litellm_app.state.MODEL_FILTER_ENABLED = app.state.MODEL_FILTER_ENABLED + litellm_app.state.MODEL_FILTER_LIST = app.state.MODEL_FILTER_LIST + return { "enabled": app.state.MODEL_FILTER_ENABLED, "models": app.state.MODEL_FILTER_LIST, @@ -231,7 +238,6 @@ async def update_webhook_url(form_data: UrlForm, user=Depends(get_admin_user)): @app.get("/api/version") async def get_app_config(): - return { "version": VERSION, } @@ -239,7 +245,7 @@ async def get_app_config(): @app.get("/api/changelog") async def get_app_changelog(): - return CHANGELOG + return {key: CHANGELOG[key] for idx, key in enumerate(CHANGELOG) if idx < 5} @app.get("/api/version/updates") diff --git a/backend/utils/webhook.py b/backend/utils/webhook.py index 1bc5a60487..e700b5031a 100644 --- a/backend/utils/webhook.py +++ b/backend/utils/webhook.py @@ -1,14 +1,41 @@ +import json import requests +from config import VERSION, WEBUI_FAVICON_URL, WEBUI_NAME def post_webhook(url: str, message: str, event_data: dict) -> bool: try: payload = {} - if "https://hooks.slack.com" in url: + # Slack and Google Chat Webhooks + if "https://hooks.slack.com" in url or "https://chat.googleapis.com" in url: payload["text"] = message + # Discord Webhooks elif "https://discord.com/api/webhooks" in url: payload["content"] = message + # Microsoft Teams Webhooks + elif "webhook.office.com" in url: + action = event_data.get("action", "undefined") + facts = [ + {"name": name, "value": value} + for name, value in json.loads(event_data.get("user", {})).items() + ] + payload = { + "@type": "MessageCard", + "@context": 
"http://schema.org/extensions", + "themeColor": "0076D7", + "summary": message, + "sections": [ + { + "activityTitle": message, + "activitySubtitle": f"{WEBUI_NAME} ({VERSION}) - {action}", + "activityImage": WEBUI_FAVICON_URL, + "facts": facts, + "markdown": True, + } + ], + } + # Default Payload else: payload = {**event_data} diff --git a/kubernetes/helm/templates/ollama-statefulset.yaml b/kubernetes/helm/templates/ollama-statefulset.yaml index a87aeab098..c348b04c0c 100644 --- a/kubernetes/helm/templates/ollama-statefulset.yaml +++ b/kubernetes/helm/templates/ollama-statefulset.yaml @@ -88,7 +88,7 @@ spec: resources: requests: storage: {{ .Values.ollama.persistence.size | quote }} - storageClass: {{ .Values.ollama.persistence.storageClass }} + storageClassName: {{ .Values.ollama.persistence.storageClass }} {{- with .Values.ollama.persistence.selector }} selector: {{- toYaml . | nindent 8 }} diff --git a/kubernetes/helm/templates/webui-pvc.yaml b/kubernetes/helm/templates/webui-pvc.yaml index 06b2cc4a4e..f06454e7ae 100644 --- a/kubernetes/helm/templates/webui-pvc.yaml +++ b/kubernetes/helm/templates/webui-pvc.yaml @@ -17,7 +17,7 @@ spec: resources: requests: storage: {{ .Values.webui.persistence.size }} - storageClass: {{ .Values.webui.persistence.storageClass }} + storageClassName: {{ .Values.webui.persistence.storageClass }} {{- with .Values.webui.persistence.selector }} selector: {{- toYaml . | nindent 4 }} diff --git a/kubernetes/manifest/base/webui-deployment.yaml b/kubernetes/manifest/base/webui-deployment.yaml index 38efd55493..79a0a9a23c 100644 --- a/kubernetes/manifest/base/webui-deployment.yaml +++ b/kubernetes/manifest/base/webui-deployment.yaml @@ -35,4 +35,4 @@ spec: volumes: - name: webui-volume persistentVolumeClaim: - claimName: ollama-webui-pvc \ No newline at end of file + claimName: open-webui-pvc \ No newline at end of file diff --git a/kubernetes/manifest/base/webui-pvc.yaml b/kubernetes/manifest/base/webui-pvc.yaml index 5c75283ad4..97fb761d42 100644 --- a/kubernetes/manifest/base/webui-pvc.yaml +++ b/kubernetes/manifest/base/webui-pvc.yaml @@ -2,8 +2,8 @@ apiVersion: v1 kind: PersistentVolumeClaim metadata: labels: - app: ollama-webui - name: ollama-webui-pvc + app: open-webui + name: open-webui-pvc namespace: open-webui spec: accessModes: ["ReadWriteOnce"] diff --git a/package.json b/package.json index 1ad61223bc..ace1cb29cf 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "open-webui", - "version": "0.1.115", + "version": "0.1.116", "private": true, "scripts": { "dev": "vite dev --host", diff --git a/src/app.html b/src/app.html index c52cff98c0..f731761cfc 100644 --- a/src/app.html +++ b/src/app.html @@ -9,7 +9,11 @@ -
+
{#each selectedModels as selectedModel, selectedModelIdx}
-
- +
+ +
+ {/if} + +
+ {#each filteredItems as item} + +
+
+ {item.label} + + {item.info?.details?.parameter_size ?? ''} +
+ + + + {#if item.info.external} + +
+ + + + +
+
+ {:else} + +
+ + + +
+
+ {/if} +
+ + {#if value === item.value} +
+ +
+ {/if} +
+ {:else} +
+
+ No results found +
+
+ {/each} + + {#if !(searchValue.trim() in $MODEL_DOWNLOAD_POOL) && searchValue && ollamaVersion && $user.role === 'admin'} + + {/if} + + {#each Object.keys($MODEL_DOWNLOAD_POOL) as model} +
+
+
+ +
+ +
+
+ Downloading "{model}" {'pullProgress' in $MODEL_DOWNLOAD_POOL[model] + ? `(${$MODEL_DOWNLOAD_POOL[model].pullProgress}%)` + : ''} +
+ + {#if 'digest' in $MODEL_DOWNLOAD_POOL[model] && $MODEL_DOWNLOAD_POOL[model].digest} +
+ {$MODEL_DOWNLOAD_POOL[model].digest} +
+ {/if} +
+
+ +
+ + + +
+
+ {/each} +
+ + + diff --git a/src/lib/components/chat/Settings/General.svelte b/src/lib/components/chat/Settings/General.svelte index 35f777a780..3b7126d855 100644 --- a/src/lib/components/chat/Settings/General.svelte +++ b/src/lib/components/chat/Settings/General.svelte @@ -14,7 +14,7 @@ export let getModels: Function; // General - let themes = ['dark', 'light', 'rose-pine dark', 'rose-pine-dawn light']; + let themes = ['dark', 'light', 'rose-pine dark', 'rose-pine-dawn light', 'oled-dark']; let selectedTheme = 'system'; let languages = []; @@ -91,12 +91,17 @@ }); const applyTheme = (_theme: string) => { - let themeToApply = _theme; + let themeToApply = _theme === 'oled-dark' ? 'dark' : _theme; if (_theme === 'system') { themeToApply = window.matchMedia('(prefers-color-scheme: dark)').matches ? 'dark' : 'light'; } + if (themeToApply === 'dark' && !_theme.includes('oled')) { + document.documentElement.style.setProperty('--color-gray-900', '#171717'); + document.documentElement.style.setProperty('--color-gray-950', '#0d0d0d'); + } + themes .filter((e) => e !== themeToApply) .forEach((e) => { @@ -115,7 +120,11 @@ const themeChangeHandler = (_theme: string) => { theme.set(_theme); localStorage.setItem('theme', _theme); - + if (_theme.includes('oled')) { + document.documentElement.style.setProperty('--color-gray-900', '#000000'); + document.documentElement.style.setProperty('--color-gray-950', '#000000'); + document.documentElement.classList.add('dark'); + } applyTheme(_theme); }; @@ -136,6 +145,7 @@ > + diff --git a/src/lib/components/chat/Settings/Interface.svelte b/src/lib/components/chat/Settings/Interface.svelte index 2d5862f404..ad9e05e7f2 100644 --- a/src/lib/components/chat/Settings/Interface.svelte +++ b/src/lib/components/chat/Settings/Interface.svelte @@ -1,7 +1,7 @@ @@ -190,8 +203,9 @@
{$i18n.t('Set Title Auto-Generation Model')}
-
-
+
+
+
Local Models
+ + {#each $models as model} + {#if model.name !== 'hr'} + + {/if} + {/each} + +
diff --git a/src/lib/components/chat/TagChatModal.svelte b/src/lib/components/chat/TagChatModal.svelte new file mode 100644 index 0000000000..53adac40e9 --- /dev/null +++ b/src/lib/components/chat/TagChatModal.svelte @@ -0,0 +1,20 @@ + + + +
+ +
+
diff --git a/src/lib/components/common/Select.svelte b/src/lib/components/common/Selector.svelte similarity index 57% rename from src/lib/components/common/Select.svelte rename to src/lib/components/common/Selector.svelte index a8563cedb4..9a56f30bb7 100644 --- a/src/lib/components/common/Select.svelte +++ b/src/lib/components/common/Selector.svelte @@ -12,6 +12,9 @@ export let value = ''; export let placeholder = 'Select a model'; + export let searchEnabled = true; + export let searchPlaceholder = 'Search a model'; + export let items = [ { value: 'mango', label: 'Mango' }, { value: 'watermelon', label: 'Watermelon' }, @@ -49,39 +52,44 @@ transition={flyAndScale} sideOffset={4} > -
- + + {#if searchEnabled} +
+ - -
+ +
-
+
+ {/if} -
- {#each filteredItems as item} - - {item.label} +
+ {#each filteredItems as item} + + {item.label} - {#if value === item.value} -
- + {#if value === item.value} +
+ +
+ {/if} + + {:else} +
+
+ No results found
- {/if} - - {:else} - - No results found - - {/each} -
+
+ {/each} +
+ - diff --git a/src/lib/components/common/Tags.svelte b/src/lib/components/common/Tags.svelte index 426678fb59..c8e12063f5 100644 --- a/src/lib/components/common/Tags.svelte +++ b/src/lib/components/common/Tags.svelte @@ -8,7 +8,7 @@ export let addTag: Function; -
+
{ @@ -17,6 +17,7 @@ /> { addTag(e.detail); }} diff --git a/src/lib/components/common/Tags/TagInput.svelte b/src/lib/components/common/Tags/TagInput.svelte index 215e6b0df6..94a298ddae 100644 --- a/src/lib/components/common/Tags/TagInput.svelte +++ b/src/lib/components/common/Tags/TagInput.svelte @@ -1,24 +1,31 @@
{#if showTagInput}
-
- - {/if}
+ + {#if label && !showTagInput} + {label} + {/if}
diff --git a/src/lib/components/common/Tags/TagList.svelte b/src/lib/components/common/Tags/TagList.svelte index 66a0b060ab..09773a2dc4 100644 --- a/src/lib/components/common/Tags/TagList.svelte +++ b/src/lib/components/common/Tags/TagList.svelte @@ -7,7 +7,7 @@ {#each tags as tag}
{tag.name} diff --git a/src/lib/components/common/Tooltip.svelte b/src/lib/components/common/Tooltip.svelte index cd6cbed8a0..92a909b228 100644 --- a/src/lib/components/common/Tooltip.svelte +++ b/src/lib/components/common/Tooltip.svelte @@ -29,6 +29,6 @@ }); -
+
diff --git a/src/lib/components/icons/ChevronUpDown.svelte b/src/lib/components/icons/ChevronUpDown.svelte new file mode 100644 index 0000000000..7f23435a2d --- /dev/null +++ b/src/lib/components/icons/ChevronUpDown.svelte @@ -0,0 +1,19 @@ + + + + + diff --git a/src/lib/components/icons/EllipsisVertical.svelte b/src/lib/components/icons/EllipsisVertical.svelte new file mode 100644 index 0000000000..7ccbea2947 --- /dev/null +++ b/src/lib/components/icons/EllipsisVertical.svelte @@ -0,0 +1,19 @@ + + + + + diff --git a/src/lib/components/layout/Navbar.svelte b/src/lib/components/layout/Navbar.svelte index 3a5fd2da4b..a986896185 100644 --- a/src/lib/components/layout/Navbar.svelte +++ b/src/lib/components/layout/Navbar.svelte @@ -4,11 +4,21 @@ import fileSaver from 'file-saver'; const { saveAs } = fileSaver; + import { Separator } from 'bits-ui'; import { getChatById } from '$lib/apis/chats'; - import { WEBUI_NAME, chatId, modelfiles, settings } from '$lib/stores'; + import { WEBUI_NAME, chatId, modelfiles, settings, showSettings } from '$lib/stores'; + + import { slide } from 'svelte/transition'; import ShareChatModal from '../chat/ShareChatModal.svelte'; import TagInput from '../common/Tags/TagInput.svelte'; - import Tags from '../common/Tags.svelte'; + import ModelSelector from '../chat/ModelSelector.svelte'; + import Tooltip from '../common/Tooltip.svelte'; + + import EllipsisVertical from '../icons/EllipsisVertical.svelte'; + import ChevronDown from '../icons/ChevronDown.svelte'; + import ChevronUpDown from '../icons/ChevronUpDown.svelte'; + import Menu from './Navbar/Menu.svelte'; + import TagChatModal from '../chat/TagChatModal.svelte'; const i18n = getContext('i18n'); @@ -16,14 +26,16 @@ export let title: string = $WEBUI_NAME; export let shareEnabled: boolean = false; + export let selectedModels; + export let tags = []; export let addTag: Function; export let deleteTag: Function; - let showShareChatModal = false; + export let showModelSelector = false; - let tagName = ''; - let showTagInput = false; + let showShareChatModal = false; + let showTagChatModal = false; const shareChat = async () => { const chat = (await getChatById(localStorage.token, $chatId)).chat; @@ -69,70 +81,154 @@ +