diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml index 85810c2ed3..3b455820d0 100644 --- a/.github/workflows/integration-test.yml +++ b/.github/workflows/integration-test.yml @@ -67,6 +67,28 @@ jobs: path: compose-logs.txt if-no-files-found: ignore + pytest: + name: Run backend tests + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install -r backend/requirements.txt + + - name: pytest run + run: | + ls -al + cd backend + PYTHONPATH=. pytest . -o log_cli=true -o log_cli_level=INFO + migration_test: name: Run Migration Tests runs-on: ubuntu-latest @@ -171,7 +193,7 @@ jobs: fi # Check that service will reconnect to postgres when connection will be closed - status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health) + status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health/db) if [[ "$status_code" -ne 200 ]] ; then echo "Server has failed before postgres reconnect check" exit 1 @@ -183,7 +205,7 @@ jobs: cur = conn.cursor(); \ cur.execute('SELECT pg_terminate_backend(psa.pid) FROM pg_stat_activity psa WHERE datname = current_database() AND pid <> pg_backend_pid();')" - status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health) + status_code=$(curl --write-out %{http_code} -s --output /dev/null http://localhost:8081/health/db) if [[ "$status_code" -ne 200 ]] ; then echo "Server has not reconnected to postgres after connection was closed: returned status $status_code" exit 1 diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000000..4eff85f0c6 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,114 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = migrations + +# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s +# Uncomment the line below if you want the files to be prepended with date and time +# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path path, will be prepended to sys.path if present. +# defaults to the current working directory. +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename. +# If specified, requires the python>=3.9 or backports.zoneinfo library. +# Any required deps can installed by adding `alembic[tz]` to the pip requirements +# string value is passed to ZoneInfo() +# leave blank for localtime +# timezone = + +# max length of characters to apply to the +# "slug" field +# truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +# version location specification; This defaults +# to migrations/versions. When using multiple version +# directories, initial revisions must be specified with --version-path. +# The path separator used here should be the separator specified by "version_path_separator" below. 
+# version_locations = %(here)s/bar:%(here)s/bat:migrations/versions + +# version path separator; As mentioned above, this is the character used to split +# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. +# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. +# Valid values for version_path_separator are: +# +# version_path_separator = : +# version_path_separator = ; +# version_path_separator = space +version_path_separator = os # Use os.pathsep. Default configuration used for new projects. + +# set to 'true' to search source files recursively +# in each "version_locations" directory +# new in Alembic version 1.10 +# recursive_version_locations = false + +# the output encoding used when revision files +# are written from script.py.mako +# output_encoding = utf-8 + +# sqlalchemy.url = REPLACE_WITH_DATABASE_URL + + +[post_write_hooks] +# post_write_hooks defines scripts or Python functions that are run +# on newly generated revision scripts. See the documentation for further +# detail and examples + +# format using "black" - use the console_scripts runner, against the "black" entrypoint +# hooks = black +# black.type = console_scripts +# black.entrypoint = black +# black.options = -l 79 REVISION_SCRIPT_FILENAME + +# lint with attempts to fix using "ruff" - use the exec runner, execute a binary +# hooks = ruff +# ruff.type = exec +# ruff.executable = %(here)s/.venv/bin/ruff +# ruff.options = --fix REVISION_SCRIPT_FILENAME + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/apps/webui/internal/db.py b/backend/apps/webui/internal/db.py index 80c30d652a..320ab3e074 100644 --- a/backend/apps/webui/internal/db.py +++ b/backend/apps/webui/internal/db.py @@ -1,18 +1,35 @@ import os import logging import json +from contextlib import contextmanager +from typing import Optional, Any +from typing_extensions import Self -from peewee import * -from peewee_migrate import Router +from sqlalchemy import create_engine, types, Dialect +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker, scoped_session +from sqlalchemy.sql.type_api import _T -from apps.webui.internal.wrappers import register_connection from config import SRC_LOG_LEVELS, DATA_DIR, DATABASE_URL, BACKEND_DIR log = logging.getLogger(__name__) log.setLevel(SRC_LOG_LEVELS["DB"]) -class JSONField(TextField): +class JSONField(types.TypeDecorator): + impl = types.Text + cache_ok = True + + def process_bind_param(self, value: Optional[_T], dialect: Dialect) -> Any: + return json.dumps(value) + + def process_result_value(self, value: Optional[_T], dialect: Dialect) -> Any: + if value is not None: + return json.loads(value) + + def copy(self, **kw: Any) -> Self: + return JSONField(self.impl.length) + def db_value(self, value): return json.dumps(value) @@ -29,26 +46,15 @@ if os.path.exists(f"{DATA_DIR}/ollama.db"): else: pass - -# The `register_connection` function encapsulates the logic for setting up -# the 
database connection based on the connection string, while `connect` -# is a Peewee-specific method to manage the connection state and avoid errors -# when a connection is already open. -try: - DB = register_connection(DATABASE_URL) - log.info(f"Connected to a {DB.__class__.__name__} database.") -except Exception as e: - log.error(f"Failed to initialize the database connection: {e}") - raise - -router = Router( - DB, - migrate_dir=BACKEND_DIR / "apps" / "webui" / "internal" / "migrations", - logger=log, +SQLALCHEMY_DATABASE_URL = DATABASE_URL +if "sqlite" in SQLALCHEMY_DATABASE_URL: + engine = create_engine( + SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False} + ) +else: + engine = create_engine(SQLALCHEMY_DATABASE_URL, pool_pre_ping=True) +SessionLocal = sessionmaker( + autocommit=False, autoflush=False, bind=engine, expire_on_commit=False ) -router.run() -try: - DB.connect(reuse_if_open=True) -except OperationalError as e: - log.info(f"Failed to connect to database again due to: {e}") - pass +Base = declarative_base() +Session = scoped_session(SessionLocal) diff --git a/backend/apps/webui/internal/migrations/001_initial_schema.py b/backend/apps/webui/internal/migrations/001_initial_schema.py deleted file mode 100644 index 93f278f15b..0000000000 --- a/backend/apps/webui/internal/migrations/001_initial_schema.py +++ /dev/null @@ -1,254 +0,0 @@ -"""Peewee migrations -- 001_initial_schema.py. - -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - # We perform different migrations for SQLite and other databases - # This is because SQLite is very loose with enforcing its schema, and trying to migrate other databases like SQLite - # will require per-database SQL queries. - # Instead, we assume that because external DB support was added at a later date, it is safe to assume a newer base - # schema instead of trying to migrate from an older schema. 
- if isinstance(database, pw.SqliteDatabase): - migrate_sqlite(migrator, database, fake=fake) - else: - migrate_external(migrator, database, fake=fake) - - -def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False): - @migrator.create_model - class Auth(pw.Model): - id = pw.CharField(max_length=255, unique=True) - email = pw.CharField(max_length=255) - password = pw.CharField(max_length=255) - active = pw.BooleanField() - - class Meta: - table_name = "auth" - - @migrator.create_model - class Chat(pw.Model): - id = pw.CharField(max_length=255, unique=True) - user_id = pw.CharField(max_length=255) - title = pw.CharField() - chat = pw.TextField() - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "chat" - - @migrator.create_model - class ChatIdTag(pw.Model): - id = pw.CharField(max_length=255, unique=True) - tag_name = pw.CharField(max_length=255) - chat_id = pw.CharField(max_length=255) - user_id = pw.CharField(max_length=255) - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "chatidtag" - - @migrator.create_model - class Document(pw.Model): - id = pw.AutoField() - collection_name = pw.CharField(max_length=255, unique=True) - name = pw.CharField(max_length=255, unique=True) - title = pw.CharField() - filename = pw.CharField() - content = pw.TextField(null=True) - user_id = pw.CharField(max_length=255) - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "document" - - @migrator.create_model - class Modelfile(pw.Model): - id = pw.AutoField() - tag_name = pw.CharField(max_length=255, unique=True) - user_id = pw.CharField(max_length=255) - modelfile = pw.TextField() - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "modelfile" - - @migrator.create_model - class Prompt(pw.Model): - id = pw.AutoField() - command = pw.CharField(max_length=255, unique=True) - user_id = pw.CharField(max_length=255) - title = pw.CharField() - content = pw.TextField() - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "prompt" - - @migrator.create_model - class Tag(pw.Model): - id = pw.CharField(max_length=255, unique=True) - name = pw.CharField(max_length=255) - user_id = pw.CharField(max_length=255) - data = pw.TextField(null=True) - - class Meta: - table_name = "tag" - - @migrator.create_model - class User(pw.Model): - id = pw.CharField(max_length=255, unique=True) - name = pw.CharField(max_length=255) - email = pw.CharField(max_length=255) - role = pw.CharField(max_length=255) - profile_image_url = pw.CharField(max_length=255) - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "user" - - -def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False): - @migrator.create_model - class Auth(pw.Model): - id = pw.CharField(max_length=255, unique=True) - email = pw.CharField(max_length=255) - password = pw.TextField() - active = pw.BooleanField() - - class Meta: - table_name = "auth" - - @migrator.create_model - class Chat(pw.Model): - id = pw.CharField(max_length=255, unique=True) - user_id = pw.CharField(max_length=255) - title = pw.TextField() - chat = pw.TextField() - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "chat" - - @migrator.create_model - class ChatIdTag(pw.Model): - id = pw.CharField(max_length=255, unique=True) - tag_name = pw.CharField(max_length=255) - chat_id = pw.CharField(max_length=255) - user_id = pw.CharField(max_length=255) - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "chatidtag" - - @migrator.create_model - class Document(pw.Model): 
- id = pw.AutoField() - collection_name = pw.CharField(max_length=255, unique=True) - name = pw.CharField(max_length=255, unique=True) - title = pw.TextField() - filename = pw.TextField() - content = pw.TextField(null=True) - user_id = pw.CharField(max_length=255) - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "document" - - @migrator.create_model - class Modelfile(pw.Model): - id = pw.AutoField() - tag_name = pw.CharField(max_length=255, unique=True) - user_id = pw.CharField(max_length=255) - modelfile = pw.TextField() - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "modelfile" - - @migrator.create_model - class Prompt(pw.Model): - id = pw.AutoField() - command = pw.CharField(max_length=255, unique=True) - user_id = pw.CharField(max_length=255) - title = pw.TextField() - content = pw.TextField() - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "prompt" - - @migrator.create_model - class Tag(pw.Model): - id = pw.CharField(max_length=255, unique=True) - name = pw.CharField(max_length=255) - user_id = pw.CharField(max_length=255) - data = pw.TextField(null=True) - - class Meta: - table_name = "tag" - - @migrator.create_model - class User(pw.Model): - id = pw.CharField(max_length=255, unique=True) - name = pw.CharField(max_length=255) - email = pw.CharField(max_length=255) - role = pw.CharField(max_length=255) - profile_image_url = pw.TextField() - timestamp = pw.BigIntegerField() - - class Meta: - table_name = "user" - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_model("user") - - migrator.remove_model("tag") - - migrator.remove_model("prompt") - - migrator.remove_model("modelfile") - - migrator.remove_model("document") - - migrator.remove_model("chatidtag") - - migrator.remove_model("chat") - - migrator.remove_model("auth") diff --git a/backend/apps/webui/internal/migrations/002_add_local_sharing.py b/backend/apps/webui/internal/migrations/002_add_local_sharing.py deleted file mode 100644 index e93501aeec..0000000000 --- a/backend/apps/webui/internal/migrations/002_add_local_sharing.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Peewee migrations -- 002_add_local_sharing.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - migrator.add_fields( - "chat", share_id=pw.CharField(max_length=255, null=True, unique=True) - ) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_fields("chat", "share_id") diff --git a/backend/apps/webui/internal/migrations/003_add_auth_api_key.py b/backend/apps/webui/internal/migrations/003_add_auth_api_key.py deleted file mode 100644 index 07144f3aca..0000000000 --- a/backend/apps/webui/internal/migrations/003_add_auth_api_key.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Peewee migrations -- 002_add_local_sharing.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - migrator.add_fields( - "user", api_key=pw.CharField(max_length=255, null=True, unique=True) - ) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_fields("user", "api_key") diff --git a/backend/apps/webui/internal/migrations/004_add_archived.py b/backend/apps/webui/internal/migrations/004_add_archived.py deleted file mode 100644 index d01c06b4e6..0000000000 --- a/backend/apps/webui/internal/migrations/004_add_archived.py +++ /dev/null @@ -1,46 +0,0 @@ -"""Peewee migrations -- 002_add_local_sharing.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - migrator.add_fields("chat", archived=pw.BooleanField(default=False)) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_fields("chat", "archived") diff --git a/backend/apps/webui/internal/migrations/005_add_updated_at.py b/backend/apps/webui/internal/migrations/005_add_updated_at.py deleted file mode 100644 index 950866ef02..0000000000 --- a/backend/apps/webui/internal/migrations/005_add_updated_at.py +++ /dev/null @@ -1,130 +0,0 @@ -"""Peewee migrations -- 002_add_local_sharing.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - if isinstance(database, pw.SqliteDatabase): - migrate_sqlite(migrator, database, fake=fake) - else: - migrate_external(migrator, database, fake=fake) - - -def migrate_sqlite(migrator: Migrator, database: pw.Database, *, fake=False): - # Adding fields created_at and updated_at to the 'chat' table - migrator.add_fields( - "chat", - created_at=pw.DateTimeField(null=True), # Allow null for transition - updated_at=pw.DateTimeField(null=True), # Allow null for transition - ) - - # Populate the new fields from an existing 'timestamp' field - migrator.sql( - "UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL" - ) - - # Now that the data has been copied, remove the original 'timestamp' field - migrator.remove_fields("chat", "timestamp") - - # Update the fields to be not null now that they are populated - migrator.change_fields( - "chat", - created_at=pw.DateTimeField(null=False), - updated_at=pw.DateTimeField(null=False), - ) - - -def migrate_external(migrator: Migrator, database: pw.Database, *, fake=False): - # Adding fields created_at and updated_at to the 'chat' table - migrator.add_fields( - "chat", - created_at=pw.BigIntegerField(null=True), # Allow null for transition - updated_at=pw.BigIntegerField(null=True), # Allow null for transition - ) - - # Populate the new fields from an existing 'timestamp' field - migrator.sql( - "UPDATE chat SET created_at = timestamp, updated_at = timestamp WHERE timestamp IS NOT NULL" - ) - - # Now that the data has been copied, remove the original 'timestamp' field - migrator.remove_fields("chat", "timestamp") - - # Update the fields to be not null now that they are populated - migrator.change_fields( - "chat", - created_at=pw.BigIntegerField(null=False), - updated_at=pw.BigIntegerField(null=False), - ) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - if isinstance(database, pw.SqliteDatabase): - rollback_sqlite(migrator, database, fake=fake) - else: - rollback_external(migrator, database, fake=fake) - - -def rollback_sqlite(migrator: Migrator, database: 
pw.Database, *, fake=False): - # Recreate the timestamp field initially allowing null values for safe transition - migrator.add_fields("chat", timestamp=pw.DateTimeField(null=True)) - - # Copy the earliest created_at date back into the new timestamp field - # This assumes created_at was originally a copy of timestamp - migrator.sql("UPDATE chat SET timestamp = created_at") - - # Remove the created_at and updated_at fields - migrator.remove_fields("chat", "created_at", "updated_at") - - # Finally, alter the timestamp field to not allow nulls if that was the original setting - migrator.change_fields("chat", timestamp=pw.DateTimeField(null=False)) - - -def rollback_external(migrator: Migrator, database: pw.Database, *, fake=False): - # Recreate the timestamp field initially allowing null values for safe transition - migrator.add_fields("chat", timestamp=pw.BigIntegerField(null=True)) - - # Copy the earliest created_at date back into the new timestamp field - # This assumes created_at was originally a copy of timestamp - migrator.sql("UPDATE chat SET timestamp = created_at") - - # Remove the created_at and updated_at fields - migrator.remove_fields("chat", "created_at", "updated_at") - - # Finally, alter the timestamp field to not allow nulls if that was the original setting - migrator.change_fields("chat", timestamp=pw.BigIntegerField(null=False)) diff --git a/backend/apps/webui/internal/migrations/006_migrate_timestamps_and_charfields.py b/backend/apps/webui/internal/migrations/006_migrate_timestamps_and_charfields.py deleted file mode 100644 index caca14d323..0000000000 --- a/backend/apps/webui/internal/migrations/006_migrate_timestamps_and_charfields.py +++ /dev/null @@ -1,130 +0,0 @@ -"""Peewee migrations -- 006_migrate_timestamps_and_charfields.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - # Alter the tables with timestamps - migrator.change_fields( - "chatidtag", - timestamp=pw.BigIntegerField(), - ) - migrator.change_fields( - "document", - timestamp=pw.BigIntegerField(), - ) - migrator.change_fields( - "modelfile", - timestamp=pw.BigIntegerField(), - ) - migrator.change_fields( - "prompt", - timestamp=pw.BigIntegerField(), - ) - migrator.change_fields( - "user", - timestamp=pw.BigIntegerField(), - ) - # Alter the tables with varchar to text where necessary - migrator.change_fields( - "auth", - password=pw.TextField(), - ) - migrator.change_fields( - "chat", - title=pw.TextField(), - ) - migrator.change_fields( - "document", - title=pw.TextField(), - filename=pw.TextField(), - ) - migrator.change_fields( - "prompt", - title=pw.TextField(), - ) - migrator.change_fields( - "user", - profile_image_url=pw.TextField(), - ) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - if isinstance(database, pw.SqliteDatabase): - # Alter the tables with timestamps - migrator.change_fields( - "chatidtag", - timestamp=pw.DateField(), - ) - migrator.change_fields( - "document", - timestamp=pw.DateField(), - ) - migrator.change_fields( - "modelfile", - timestamp=pw.DateField(), - ) - migrator.change_fields( - "prompt", - timestamp=pw.DateField(), - ) - migrator.change_fields( - "user", - timestamp=pw.DateField(), - ) - migrator.change_fields( - "auth", - password=pw.CharField(max_length=255), - ) - migrator.change_fields( - "chat", - title=pw.CharField(), - ) - migrator.change_fields( - "document", - title=pw.CharField(), - filename=pw.CharField(), - ) - migrator.change_fields( - "prompt", - title=pw.CharField(), - ) - migrator.change_fields( - "user", - profile_image_url=pw.CharField(), - ) diff --git a/backend/apps/webui/internal/migrations/007_add_user_last_active_at.py b/backend/apps/webui/internal/migrations/007_add_user_last_active_at.py deleted file mode 100644 index dd176ba73e..0000000000 --- a/backend/apps/webui/internal/migrations/007_add_user_last_active_at.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Peewee migrations -- 
002_add_local_sharing.py. - -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - # Adding fields created_at and updated_at to the 'user' table - migrator.add_fields( - "user", - created_at=pw.BigIntegerField(null=True), # Allow null for transition - updated_at=pw.BigIntegerField(null=True), # Allow null for transition - last_active_at=pw.BigIntegerField(null=True), # Allow null for transition - ) - - # Populate the new fields from an existing 'timestamp' field - migrator.sql( - 'UPDATE "user" SET created_at = timestamp, updated_at = timestamp, last_active_at = timestamp WHERE timestamp IS NOT NULL' - ) - - # Now that the data has been copied, remove the original 'timestamp' field - migrator.remove_fields("user", "timestamp") - - # Update the fields to be not null now that they are populated - migrator.change_fields( - "user", - created_at=pw.BigIntegerField(null=False), - updated_at=pw.BigIntegerField(null=False), - last_active_at=pw.BigIntegerField(null=False), - ) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - # Recreate the timestamp field initially allowing null values for safe transition - migrator.add_fields("user", timestamp=pw.BigIntegerField(null=True)) - - # Copy the earliest created_at date back into the new timestamp field - # This assumes created_at was originally a copy of timestamp - migrator.sql('UPDATE "user" SET timestamp = created_at') - - # Remove the created_at and updated_at fields - migrator.remove_fields("user", "created_at", "updated_at", "last_active_at") - - # Finally, alter the timestamp field to not allow nulls if that was the original setting - migrator.change_fields("user", timestamp=pw.BigIntegerField(null=False)) diff --git a/backend/apps/webui/internal/migrations/008_add_memory.py b/backend/apps/webui/internal/migrations/008_add_memory.py deleted file mode 100644 index 9307aa4d5c..0000000000 --- a/backend/apps/webui/internal/migrations/008_add_memory.py +++ /dev/null @@ -1,53 +0,0 @@ -"""Peewee migrations -- 002_add_local_sharing.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - @migrator.create_model - class Memory(pw.Model): - id = pw.CharField(max_length=255, unique=True) - user_id = pw.CharField(max_length=255) - content = pw.TextField(null=False) - updated_at = pw.BigIntegerField(null=False) - created_at = pw.BigIntegerField(null=False) - - class Meta: - table_name = "memory" - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_model("memory") diff --git a/backend/apps/webui/internal/migrations/009_add_models.py b/backend/apps/webui/internal/migrations/009_add_models.py deleted file mode 100644 index 548ec7cdca..0000000000 --- a/backend/apps/webui/internal/migrations/009_add_models.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Peewee migrations -- 009_add_models.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - @migrator.create_model - class Model(pw.Model): - id = pw.TextField(unique=True) - user_id = pw.TextField() - base_model_id = pw.TextField(null=True) - - name = pw.TextField() - - meta = pw.TextField() - params = pw.TextField() - - created_at = pw.BigIntegerField(null=False) - updated_at = pw.BigIntegerField(null=False) - - class Meta: - table_name = "model" - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_model("model") diff --git a/backend/apps/webui/internal/migrations/010_migrate_modelfiles_to_models.py b/backend/apps/webui/internal/migrations/010_migrate_modelfiles_to_models.py deleted file mode 100644 index 2ef814c06b..0000000000 --- a/backend/apps/webui/internal/migrations/010_migrate_modelfiles_to_models.py +++ /dev/null @@ -1,130 +0,0 @@ -"""Peewee migrations -- 009_add_models.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator -import json - -from utils.misc import parse_ollama_modelfile - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - # Fetch data from 'modelfile' table and insert into 'model' table - migrate_modelfile_to_model(migrator, database) - # Drop the 'modelfile' table - migrator.remove_model("modelfile") - - -def migrate_modelfile_to_model(migrator: Migrator, database: pw.Database): - ModelFile = migrator.orm["modelfile"] - Model = migrator.orm["model"] - - modelfiles = ModelFile.select() - - for modelfile in modelfiles: - # Extract and transform data in Python - - modelfile.modelfile = json.loads(modelfile.modelfile) - meta = json.dumps( - { - "description": modelfile.modelfile.get("desc"), - "profile_image_url": modelfile.modelfile.get("imageUrl"), - "ollama": {"modelfile": modelfile.modelfile.get("content")}, - "suggestion_prompts": modelfile.modelfile.get("suggestionPrompts"), - "categories": modelfile.modelfile.get("categories"), - "user": {**modelfile.modelfile.get("user", {}), "community": True}, - } - ) - - info = parse_ollama_modelfile(modelfile.modelfile.get("content")) - - # Insert the processed data into the 'model' table - Model.create( - id=f"ollama-{modelfile.tag_name}", - user_id=modelfile.user_id, - base_model_id=info.get("base_model_id"), - name=modelfile.modelfile.get("title"), - meta=meta, - params=json.dumps(info.get("params", {})), - created_at=modelfile.timestamp, - updated_at=modelfile.timestamp, - ) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - recreate_modelfile_table(migrator, database) - move_data_back_to_modelfile(migrator, database) - migrator.remove_model("model") - - -def recreate_modelfile_table(migrator: Migrator, database: pw.Database): - query = """ - CREATE TABLE IF NOT EXISTS modelfile ( - user_id TEXT, - tag_name TEXT, - modelfile JSON, - timestamp BIGINT - ) - """ - migrator.sql(query) - - -def move_data_back_to_modelfile(migrator: Migrator, database: pw.Database): - Model = migrator.orm["model"] - Modelfile = migrator.orm["modelfile"] - - models = Model.select() - - for model in models: - # Extract and transform data in Python - meta = 
json.loads(model.meta) - - modelfile_data = { - "title": model.name, - "desc": meta.get("description"), - "imageUrl": meta.get("profile_image_url"), - "content": meta.get("ollama", {}).get("modelfile"), - "suggestionPrompts": meta.get("suggestion_prompts"), - "categories": meta.get("categories"), - "user": {k: v for k, v in meta.get("user", {}).items() if k != "community"}, - } - - # Insert the processed data back into the 'modelfile' table - Modelfile.create( - user_id=model.user_id, - tag_name=model.id, - modelfile=modelfile_data, - timestamp=model.created_at, - ) diff --git a/backend/apps/webui/internal/migrations/011_add_user_settings.py b/backend/apps/webui/internal/migrations/011_add_user_settings.py deleted file mode 100644 index a1620dcada..0000000000 --- a/backend/apps/webui/internal/migrations/011_add_user_settings.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Peewee migrations -- 002_add_local_sharing.py. - -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - # Adding fields settings to the 'user' table - migrator.add_fields("user", settings=pw.TextField(null=True)) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - # Remove the settings field - migrator.remove_fields("user", "settings") diff --git a/backend/apps/webui/internal/migrations/012_add_tools.py b/backend/apps/webui/internal/migrations/012_add_tools.py deleted file mode 100644 index 4a68eea552..0000000000 --- a/backend/apps/webui/internal/migrations/012_add_tools.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Peewee migrations -- 009_add_models.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - @migrator.create_model - class Tool(pw.Model): - id = pw.TextField(unique=True) - user_id = pw.TextField() - - name = pw.TextField() - content = pw.TextField() - specs = pw.TextField() - - meta = pw.TextField() - - created_at = pw.BigIntegerField(null=False) - updated_at = pw.BigIntegerField(null=False) - - class Meta: - table_name = "tool" - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_model("tool") diff --git a/backend/apps/webui/internal/migrations/013_add_user_info.py b/backend/apps/webui/internal/migrations/013_add_user_info.py deleted file mode 100644 index 0f68669cca..0000000000 --- a/backend/apps/webui/internal/migrations/013_add_user_info.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Peewee migrations -- 002_add_local_sharing.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - # Adding fields info to the 'user' table - migrator.add_fields("user", info=pw.TextField(null=True)) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - # Remove the settings field - migrator.remove_fields("user", "info") diff --git a/backend/apps/webui/internal/migrations/014_add_files.py b/backend/apps/webui/internal/migrations/014_add_files.py deleted file mode 100644 index 5e1acf0ad8..0000000000 --- a/backend/apps/webui/internal/migrations/014_add_files.py +++ /dev/null @@ -1,55 +0,0 @@ -"""Peewee migrations -- 009_add_models.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - @migrator.create_model - class File(pw.Model): - id = pw.TextField(unique=True) - user_id = pw.TextField() - filename = pw.TextField() - meta = pw.TextField() - created_at = pw.BigIntegerField(null=False) - - class Meta: - table_name = "file" - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_model("file") diff --git a/backend/apps/webui/internal/migrations/015_add_functions.py b/backend/apps/webui/internal/migrations/015_add_functions.py deleted file mode 100644 index 8316a9333b..0000000000 --- a/backend/apps/webui/internal/migrations/015_add_functions.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Peewee migrations -- 009_add_models.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - @migrator.create_model - class Function(pw.Model): - id = pw.TextField(unique=True) - user_id = pw.TextField() - - name = pw.TextField() - type = pw.TextField() - - content = pw.TextField() - meta = pw.TextField() - - created_at = pw.BigIntegerField(null=False) - updated_at = pw.BigIntegerField(null=False) - - class Meta: - table_name = "function" - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_model("function") diff --git a/backend/apps/webui/internal/migrations/016_add_valves_and_is_active.py b/backend/apps/webui/internal/migrations/016_add_valves_and_is_active.py deleted file mode 100644 index e3af521b7e..0000000000 --- a/backend/apps/webui/internal/migrations/016_add_valves_and_is_active.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Peewee migrations -- 009_add_models.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - migrator.add_fields("tool", valves=pw.TextField(null=True)) - migrator.add_fields("function", valves=pw.TextField(null=True)) - migrator.add_fields("function", is_active=pw.BooleanField(default=False)) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_fields("tool", "valves") - migrator.remove_fields("function", "valves") - migrator.remove_fields("function", "is_active") diff --git a/backend/apps/webui/internal/migrations/017_add_user_oauth_sub.py b/backend/apps/webui/internal/migrations/017_add_user_oauth_sub.py deleted file mode 100644 index fd1d9b5606..0000000000 --- a/backend/apps/webui/internal/migrations/017_add_user_oauth_sub.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Peewee migrations -- 017_add_user_oauth_sub.py. 
- -Some examples (model - class or model name):: - - > Model = migrator.orm['table_name'] # Return model in current state by name - > Model = migrator.ModelClass # Return model in current state by name - - > migrator.sql(sql) # Run custom SQL - > migrator.run(func, *args, **kwargs) # Run python function with the given args - > migrator.create_model(Model) # Create a model (could be used as decorator) - > migrator.remove_model(model, cascade=True) # Remove a model - > migrator.add_fields(model, **fields) # Add fields to a model - > migrator.change_fields(model, **fields) # Change fields - > migrator.remove_fields(model, *field_names, cascade=True) - > migrator.rename_field(model, old_field_name, new_field_name) - > migrator.rename_table(model, new_table_name) - > migrator.add_index(model, *col_names, unique=False) - > migrator.add_not_null(model, *field_names) - > migrator.add_default(model, field_name, default) - > migrator.add_constraint(model, name, sql) - > migrator.drop_index(model, *col_names) - > migrator.drop_not_null(model, *field_names) - > migrator.drop_constraints(model, *constraints) - -""" - -from contextlib import suppress - -import peewee as pw -from peewee_migrate import Migrator - - -with suppress(ImportError): - import playhouse.postgres_ext as pw_pext - - -def migrate(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your migrations here.""" - - migrator.add_fields( - "user", - oauth_sub=pw.TextField(null=True, unique=True), - ) - - -def rollback(migrator: Migrator, database: pw.Database, *, fake=False): - """Write your rollback migrations here.""" - - migrator.remove_fields("user", "oauth_sub") diff --git a/backend/apps/webui/internal/migrations/README.md b/backend/apps/webui/internal/migrations/README.md deleted file mode 100644 index 2602141137..0000000000 --- a/backend/apps/webui/internal/migrations/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# Database Migrations - -This directory contains all the database migrations for the web app. -Migrations are done using the [`peewee-migrate`](https://github.com/klen/peewee_migrate) library. - -Migrations are automatically ran at app startup. - -## Creating a migration - -Have you made a change to the schema of an existing model? -You will need to create a migration file to ensure that existing databases are updated for backwards compatibility. - -1. Have a database file (`webui.db`) that has the old schema prior to any of your changes. -2. Make your changes to the models. -3. From the `backend` directory, run the following command: - ```bash - pw_migrate create --auto --auto-source apps.webui.models --database sqlite:///${SQLITE_DB} --directory apps/web/internal/migrations ${MIGRATION_NAME} - ``` - - `$SQLITE_DB` should be the path to the database file. - - `$MIGRATION_NAME` should be a descriptive name for the migration. -4. The migration file will be created in the `apps/web/internal/migrations` directory. 
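The README deleted above documented the peewee-migrate workflow. With the Alembic configuration this diff introduces (`backend/alembic.ini` with `script_location = migrations`), new revisions would presumably be created and applied through Alembic instead. Below is a minimal sketch using Alembic's Python command API; it assumes a `backend/migrations/env.py` (not shown in this section) that points `sqlalchemy.url` at `DATABASE_URL` and sets `target_metadata = Base.metadata`, and the revision message is illustrative only:

```python
# Sketch only: assumes migrations/env.py is wired to DATABASE_URL and Base.metadata,
# which is not visible in this part of the diff.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # the alembic.ini added by this PR, run from backend/
command.revision(cfg, message="describe your schema change", autogenerate=True)
command.upgrade(cfg, "head")  # apply all pending revisions
```

The equivalent CLI invocations (`alembic revision --autogenerate -m "..."` and `alembic upgrade head`) do the same thing from the `backend` directory.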
diff --git a/backend/apps/webui/internal/wrappers.py b/backend/apps/webui/internal/wrappers.py deleted file mode 100644 index 2b5551ce2b..0000000000 --- a/backend/apps/webui/internal/wrappers.py +++ /dev/null @@ -1,72 +0,0 @@ -from contextvars import ContextVar -from peewee import * -from peewee import PostgresqlDatabase, InterfaceError as PeeWeeInterfaceError - -import logging -from playhouse.db_url import connect, parse -from playhouse.shortcuts import ReconnectMixin - -from config import SRC_LOG_LEVELS - -log = logging.getLogger(__name__) -log.setLevel(SRC_LOG_LEVELS["DB"]) - -db_state_default = {"closed": None, "conn": None, "ctx": None, "transactions": None} -db_state = ContextVar("db_state", default=db_state_default.copy()) - - -class PeeweeConnectionState(object): - def __init__(self, **kwargs): - super().__setattr__("_state", db_state) - super().__init__(**kwargs) - - def __setattr__(self, name, value): - self._state.get()[name] = value - - def __getattr__(self, name): - value = self._state.get()[name] - return value - - -class CustomReconnectMixin(ReconnectMixin): - reconnect_errors = ( - # psycopg2 - (OperationalError, "termin"), - (InterfaceError, "closed"), - # peewee - (PeeWeeInterfaceError, "closed"), - ) - - -class ReconnectingPostgresqlDatabase(CustomReconnectMixin, PostgresqlDatabase): - pass - - -def register_connection(db_url): - db = connect(db_url) - if isinstance(db, PostgresqlDatabase): - # Enable autoconnect for SQLite databases, managed by Peewee - db.autoconnect = True - db.reuse_if_open = True - log.info("Connected to PostgreSQL database") - - # Get the connection details - connection = parse(db_url) - - # Use our custom database class that supports reconnection - db = ReconnectingPostgresqlDatabase( - connection["database"], - user=connection["user"], - password=connection["password"], - host=connection["host"], - port=connection["port"], - ) - db.connect(reuse_if_open=True) - elif isinstance(db, SqliteDatabase): - # Enable autoconnect for SQLite databases, managed by Peewee - db.autoconnect = True - db.reuse_if_open = True - log.info("Connected to SQLite database") - else: - raise ValueError("Unsupported database connection") - return db diff --git a/backend/apps/webui/main.py b/backend/apps/webui/main.py index b23ac782bb..629055ec36 100644 --- a/backend/apps/webui/main.py +++ b/backend/apps/webui/main.py @@ -3,7 +3,7 @@ from fastapi.routing import APIRoute from fastapi.responses import StreamingResponse from fastapi.middleware.cors import CORSMiddleware from starlette.middleware.sessions import SessionMiddleware - +from sqlalchemy.orm import Session from apps.webui.routers import ( auths, users, diff --git a/backend/apps/webui/models/auths.py b/backend/apps/webui/models/auths.py index 9ea38abcb1..560d9a6860 100644 --- a/backend/apps/webui/models/auths.py +++ b/backend/apps/webui/models/auths.py @@ -1,14 +1,13 @@ from pydantic import BaseModel -from typing import List, Union, Optional -import time +from typing import Optional import uuid import logging -from peewee import * +from sqlalchemy import String, Column, Boolean, Text from apps.webui.models.users import UserModel, Users from utils.utils import verify_password -from apps.webui.internal.db import DB +from apps.webui.internal.db import Base, Session from config import SRC_LOG_LEVELS @@ -20,14 +19,13 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"]) #################### -class Auth(Model): - id = CharField(unique=True) - email = CharField() - password = TextField() - active = BooleanField() +class Auth(Base): + 
__tablename__ = "auth"
 
-    class Meta:
-        database = DB
+    id = Column(String, primary_key=True)
+    email = Column(String)
+    password = Column(Text)
+    active = Column(Boolean)
 
 
 class AuthModel(BaseModel):
@@ -94,9 +92,6 @@ class AddUserForm(SignupForm):
 
 
 class AuthsTable:
-    def __init__(self, db):
-        self.db = db
-        self.db.create_tables([Auth])
 
     def insert_new_auth(
         self,
@@ -114,12 +109,16 @@ class AuthsTable:
         auth = AuthModel(
             **{"id": id, "email": email, "password": password, "active": True}
         )
-        result = Auth.create(**auth.model_dump())
+        result = Auth(**auth.model_dump())
+        Session.add(result)
 
         user = Users.insert_new_user(
             id, name, email, profile_image_url, role, oauth_sub
         )
 
+        Session.commit()
+        Session.refresh(result)
+
         if result and user:
             return user
         else:
@@ -128,7 +127,7 @@ class AuthsTable:
     def authenticate_user(self, email: str, password: str) -> Optional[UserModel]:
         log.info(f"authenticate_user: {email}")
         try:
-            auth = Auth.get(Auth.email == email, Auth.active == True)
+            auth = Session.query(Auth).filter_by(email=email, active=True).first()
             if auth:
                 if verify_password(password, auth.password):
                     user = Users.get_user_by_id(auth.id)
@@ -155,7 +154,7 @@ class AuthsTable:
     def authenticate_user_by_trusted_header(self, email: str) -> Optional[UserModel]:
         log.info(f"authenticate_user_by_trusted_header: {email}")
         try:
-            auth = Auth.get(Auth.email == email, Auth.active == True)
+            auth = Session.query(Auth).filter_by(email=email, active=True).first()
             if auth:
                 user = Users.get_user_by_id(auth.id)
                 return user
@@ -164,18 +163,16 @@ class AuthsTable:
 
     def update_user_password_by_id(self, id: str, new_password: str) -> bool:
         try:
-            query = Auth.update(password=new_password).where(Auth.id == id)
-            result = query.execute()
-
+            result = (
+                Session.query(Auth).filter_by(id=id).update({"password": new_password})
+            )
             return True if result == 1 else False
         except:
             return False
 
     def update_email_by_id(self, id: str, email: str) -> bool:
         try:
-            query = Auth.update(email=email).where(Auth.id == id)
-            result = query.execute()
-
+            result = Session.query(Auth).filter_by(id=id).update({"email": email})
             return True if result == 1 else False
         except:
             return False
@@ -186,9 +183,7 @@ class AuthsTable:
             result = Users.delete_user_by_id(id)
 
             if result:
-                # Delete Auth
-                query = Auth.delete().where(Auth.id == id)
-                query.execute()  # Remove the rows, return number of rows removed.
+ Session.query(Auth).filter_by(id=id).delete() return True else: @@ -197,4 +192,4 @@ class AuthsTable: return False -Auths = AuthsTable(DB) +Auths = AuthsTable() diff --git a/backend/apps/webui/models/chats.py b/backend/apps/webui/models/chats.py index a6f1ae9233..d6829ee7ba 100644 --- a/backend/apps/webui/models/chats.py +++ b/backend/apps/webui/models/chats.py @@ -1,36 +1,38 @@ -from pydantic import BaseModel +from pydantic import BaseModel, ConfigDict from typing import List, Union, Optional -from peewee import * -from playhouse.shortcuts import model_to_dict import json import uuid import time -from apps.webui.internal.db import DB +from sqlalchemy import Column, String, BigInteger, Boolean, Text + +from apps.webui.internal.db import Base, Session + #################### # Chat DB Schema #################### -class Chat(Model): - id = CharField(unique=True) - user_id = CharField() - title = TextField() - chat = TextField() # Save Chat JSON as Text +class Chat(Base): + __tablename__ = "chat" - created_at = BigIntegerField() - updated_at = BigIntegerField() + id = Column(String, primary_key=True) + user_id = Column(String) + title = Column(Text) + chat = Column(Text) # Save Chat JSON as Text - share_id = CharField(null=True, unique=True) - archived = BooleanField(default=False) + created_at = Column(BigInteger) + updated_at = Column(BigInteger) - class Meta: - database = DB + share_id = Column(Text, unique=True, nullable=True) + archived = Column(Boolean, default=False) class ChatModel(BaseModel): + model_config = ConfigDict(from_attributes=True) + id: str user_id: str title: str @@ -75,9 +77,6 @@ class ChatTitleIdResponse(BaseModel): class ChatTable: - def __init__(self, db): - self.db = db - db.create_tables([Chat]) def insert_new_chat(self, user_id: str, form_data: ChatForm) -> Optional[ChatModel]: id = str(uuid.uuid4()) @@ -94,26 +93,28 @@ class ChatTable: } ) - result = Chat.create(**chat.model_dump()) - return chat if result else None + result = Chat(**chat.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) + return ChatModel.model_validate(result) if result else None def update_chat_by_id(self, id: str, chat: dict) -> Optional[ChatModel]: try: - query = Chat.update( - chat=json.dumps(chat), - title=chat["title"] if "title" in chat else "New Chat", - updated_at=int(time.time()), - ).where(Chat.id == id) - query.execute() + chat_obj = Session.get(Chat, id) + chat_obj.chat = json.dumps(chat) + chat_obj.title = chat["title"] if "title" in chat else "New Chat" + chat_obj.updated_at = int(time.time()) + Session.commit() + Session.refresh(chat_obj) - chat = Chat.get(Chat.id == id) - return ChatModel(**model_to_dict(chat)) - except: + return ChatModel.model_validate(chat_obj) + except Exception as e: return None def insert_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]: # Get the existing chat to share - chat = Chat.get(Chat.id == chat_id) + chat = Session.get(Chat, chat_id) # Check if the chat is already shared if chat.share_id: return self.get_chat_by_id_and_user_id(chat.share_id, "shared") @@ -128,10 +129,15 @@ class ChatTable: "updated_at": int(time.time()), } ) - shared_result = Chat.create(**shared_chat.model_dump()) + shared_result = Chat(**shared_chat.model_dump()) + Session.add(shared_result) + Session.commit() + Session.refresh(shared_result) # Update the original chat with the share_id result = ( - Chat.update(share_id=shared_chat.id).where(Chat.id == chat_id).execute() + Session.query(Chat) + .filter_by(id=chat_id) + 
.update({"share_id": shared_chat.id}) ) return shared_chat if (shared_result and result) else None @@ -139,26 +145,20 @@ class ChatTable: def update_shared_chat_by_chat_id(self, chat_id: str) -> Optional[ChatModel]: try: print("update_shared_chat_by_id") - chat = Chat.get(Chat.id == chat_id) + chat = Session.get(Chat, chat_id) print(chat) + chat.title = chat.title + chat.chat = chat.chat + Session.commit() + Session.refresh(chat) - query = Chat.update( - title=chat.title, - chat=chat.chat, - ).where(Chat.id == chat.share_id) - - query.execute() - - chat = Chat.get(Chat.id == chat.share_id) - return ChatModel(**model_to_dict(chat)) + return self.get_chat_by_id(chat.share_id) except: return None def delete_shared_chat_by_chat_id(self, chat_id: str) -> bool: try: - query = Chat.delete().where(Chat.user_id == f"shared-{chat_id}") - query.execute() # Remove the rows, return number of rows removed. - + Session.query(Chat).filter_by(user_id=f"shared-{chat_id}").delete() return True except: return False @@ -167,40 +167,27 @@ class ChatTable: self, id: str, share_id: Optional[str] ) -> Optional[ChatModel]: try: - query = Chat.update( - share_id=share_id, - ).where(Chat.id == id) - query.execute() - - chat = Chat.get(Chat.id == id) - return ChatModel(**model_to_dict(chat)) + chat = Session.get(Chat, id) + chat.share_id = share_id + Session.commit() + Session.refresh(chat) + return ChatModel.model_validate(chat) except: return None def toggle_chat_archive_by_id(self, id: str) -> Optional[ChatModel]: try: - chat = self.get_chat_by_id(id) - query = Chat.update( - archived=(not chat.archived), - ).where(Chat.id == id) - - query.execute() - - chat = Chat.get(Chat.id == id) - return ChatModel(**model_to_dict(chat)) + chat = Session.get(Chat, id) + chat.archived = not chat.archived + Session.commit() + Session.refresh(chat) + return ChatModel.model_validate(chat) except: return None def archive_all_chats_by_user_id(self, user_id: str) -> bool: try: - chats = self.get_chats_by_user_id(user_id) - for chat in chats: - query = Chat.update( - archived=True, - ).where(Chat.id == chat.id) - - query.execute() - + Session.query(Chat).filter_by(user_id=user_id).update({"archived": True}) return True except: return False @@ -208,15 +195,14 @@ class ChatTable: def get_archived_chat_list_by_user_id( self, user_id: str, skip: int = 0, limit: int = 50 ) -> List[ChatModel]: - return [ - ChatModel(**model_to_dict(chat)) - for chat in Chat.select() - .where(Chat.archived == True) - .where(Chat.user_id == user_id) + all_chats = ( + Session.query(Chat) + .filter_by(user_id=user_id, archived=True) .order_by(Chat.updated_at.desc()) - # .limit(limit) - # .offset(skip) - ] + # .limit(limit).offset(skip) + .all() + ) + return [ChatModel.model_validate(chat) for chat in all_chats] def get_chat_list_by_user_id( self, @@ -225,92 +211,80 @@ class ChatTable: skip: int = 0, limit: int = 50, ) -> List[ChatModel]: - if include_archived: - return [ - ChatModel(**model_to_dict(chat)) - for chat in Chat.select() - .where(Chat.user_id == user_id) - .order_by(Chat.updated_at.desc()) - # .limit(limit) - # .offset(skip) - ] - else: - return [ - ChatModel(**model_to_dict(chat)) - for chat in Chat.select() - .where(Chat.archived == False) - .where(Chat.user_id == user_id) - .order_by(Chat.updated_at.desc()) - # .limit(limit) - # .offset(skip) - ] + query = Session.query(Chat).filter_by(user_id=user_id) + if not include_archived: + query = query.filter_by(archived=False) + all_chats = ( + query.order_by(Chat.updated_at.desc()) + # 
.limit(limit).offset(skip) + .all() + ) + return [ChatModel.model_validate(chat) for chat in all_chats] def get_chat_list_by_chat_ids( self, chat_ids: List[str], skip: int = 0, limit: int = 50 ) -> List[ChatModel]: - return [ - ChatModel(**model_to_dict(chat)) - for chat in Chat.select() - .where(Chat.archived == False) - .where(Chat.id.in_(chat_ids)) + all_chats = ( + Session.query(Chat) + .filter(Chat.id.in_(chat_ids)) + .filter_by(archived=False) .order_by(Chat.updated_at.desc()) - ] + .all() + ) + return [ChatModel.model_validate(chat) for chat in all_chats] def get_chat_by_id(self, id: str) -> Optional[ChatModel]: try: - chat = Chat.get(Chat.id == id) - return ChatModel(**model_to_dict(chat)) + chat = Session.get(Chat, id) + return ChatModel.model_validate(chat) except: return None def get_chat_by_share_id(self, id: str) -> Optional[ChatModel]: try: - chat = Chat.get(Chat.share_id == id) + chat = Session.query(Chat).filter_by(share_id=id).first() if chat: - chat = Chat.get(Chat.id == id) - return ChatModel(**model_to_dict(chat)) + return self.get_chat_by_id(id) else: return None - except: + except Exception as e: return None def get_chat_by_id_and_user_id(self, id: str, user_id: str) -> Optional[ChatModel]: try: - chat = Chat.get(Chat.id == id, Chat.user_id == user_id) - return ChatModel(**model_to_dict(chat)) + chat = Session.query(Chat).filter_by(id=id, user_id=user_id).first() + return ChatModel.model_validate(chat) except: return None def get_chats(self, skip: int = 0, limit: int = 50) -> List[ChatModel]: - return [ - ChatModel(**model_to_dict(chat)) - for chat in Chat.select().order_by(Chat.updated_at.desc()) + all_chats = ( + Session.query(Chat) # .limit(limit).offset(skip) - ] + .order_by(Chat.updated_at.desc()) + ) + return [ChatModel.model_validate(chat) for chat in all_chats] def get_chats_by_user_id(self, user_id: str) -> List[ChatModel]: - return [ - ChatModel(**model_to_dict(chat)) - for chat in Chat.select() - .where(Chat.user_id == user_id) + all_chats = ( + Session.query(Chat) + .filter_by(user_id=user_id) .order_by(Chat.updated_at.desc()) - # .limit(limit).offset(skip) - ] + ) + return [ChatModel.model_validate(chat) for chat in all_chats] def get_archived_chats_by_user_id(self, user_id: str) -> List[ChatModel]: - return [ - ChatModel(**model_to_dict(chat)) - for chat in Chat.select() - .where(Chat.archived == True) - .where(Chat.user_id == user_id) + all_chats = ( + Session.query(Chat) + .filter_by(user_id=user_id, archived=True) .order_by(Chat.updated_at.desc()) - ] + ) + return [ChatModel.model_validate(chat) for chat in all_chats] def delete_chat_by_id(self, id: str) -> bool: try: - query = Chat.delete().where((Chat.id == id)) - query.execute() # Remove the rows, return number of rows removed. + Session.query(Chat).filter_by(id=id).delete() return True and self.delete_shared_chat_by_chat_id(id) except: @@ -318,8 +292,7 @@ class ChatTable: def delete_chat_by_id_and_user_id(self, id: str, user_id: str) -> bool: try: - query = Chat.delete().where((Chat.id == id) & (Chat.user_id == user_id)) - query.execute() # Remove the rows, return number of rows removed. + Session.query(Chat).filter_by(id=id, user_id=user_id).delete() return True and self.delete_shared_chat_by_chat_id(id) except: @@ -327,29 +300,23 @@ class ChatTable: def delete_chats_by_user_id(self, user_id: str) -> bool: try: - self.delete_shared_chats_by_user_id(user_id) - query = Chat.delete().where(Chat.user_id == user_id) - query.execute() # Remove the rows, return number of rows removed. 
- + Session.query(Chat).filter_by(user_id=user_id).delete() return True except: return False def delete_shared_chats_by_user_id(self, user_id: str) -> bool: try: - shared_chat_ids = [ - f"shared-{chat.id}" - for chat in Chat.select().where(Chat.user_id == user_id) - ] + chats_by_user = Session.query(Chat).filter_by(user_id=user_id).all() + shared_chat_ids = [f"shared-{chat.id}" for chat in chats_by_user] - query = Chat.delete().where(Chat.user_id << shared_chat_ids) - query.execute() # Remove the rows, return number of rows removed. + Session.query(Chat).filter(Chat.user_id.in_(shared_chat_ids)).delete() return True except: return False -Chats = ChatTable(DB) +Chats = ChatTable() diff --git a/backend/apps/webui/models/documents.py b/backend/apps/webui/models/documents.py index 3b730535fb..1b69d44a56 100644 --- a/backend/apps/webui/models/documents.py +++ b/backend/apps/webui/models/documents.py @@ -1,14 +1,11 @@ -from pydantic import BaseModel -from peewee import * -from playhouse.shortcuts import model_to_dict -from typing import List, Union, Optional +from pydantic import BaseModel, ConfigDict +from typing import List, Optional import time import logging -from utils.utils import decode_token -from utils.misc import get_gravatar_url +from sqlalchemy import String, Column, BigInteger, Text -from apps.webui.internal.db import DB +from apps.webui.internal.db import Base, Session import json @@ -22,20 +19,21 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"]) #################### -class Document(Model): - collection_name = CharField(unique=True) - name = CharField(unique=True) - title = TextField() - filename = TextField() - content = TextField(null=True) - user_id = CharField() - timestamp = BigIntegerField() +class Document(Base): + __tablename__ = "document" - class Meta: - database = DB + collection_name = Column(String, primary_key=True) + name = Column(String, unique=True) + title = Column(Text) + filename = Column(Text) + content = Column(Text, nullable=True) + user_id = Column(String) + timestamp = Column(BigInteger) class DocumentModel(BaseModel): + model_config = ConfigDict(from_attributes=True) + collection_name: str name: str title: str @@ -72,9 +70,6 @@ class DocumentForm(DocumentUpdateForm): class DocumentsTable: - def __init__(self, db): - self.db = db - self.db.create_tables([Document]) def insert_new_doc( self, user_id: str, form_data: DocumentForm @@ -88,9 +83,12 @@ class DocumentsTable: ) try: - result = Document.create(**document.model_dump()) + result = Document(**document.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return document + return DocumentModel.model_validate(result) else: return None except: @@ -98,31 +96,29 @@ class DocumentsTable: def get_doc_by_name(self, name: str) -> Optional[DocumentModel]: try: - document = Document.get(Document.name == name) - return DocumentModel(**model_to_dict(document)) + document = Session.query(Document).filter_by(name=name).first() + return DocumentModel.model_validate(document) if document else None except: return None def get_docs(self) -> List[DocumentModel]: return [ - DocumentModel(**model_to_dict(doc)) - for doc in Document.select() - # .limit(limit).offset(skip) + DocumentModel.model_validate(doc) for doc in Session.query(Document).all() ] def update_doc_by_name( self, name: str, form_data: DocumentUpdateForm ) -> Optional[DocumentModel]: try: - query = Document.update( - title=form_data.title, - name=form_data.name, - timestamp=int(time.time()), - ).where(Document.name == name) - 
query.execute() - - doc = Document.get(Document.name == form_data.name) - return DocumentModel(**model_to_dict(doc)) + Session.query(Document).filter_by(name=name).update( + { + "title": form_data.title, + "name": form_data.name, + "timestamp": int(time.time()), + } + ) + Session.commit() + return self.get_doc_by_name(form_data.name) except Exception as e: log.exception(e) return None @@ -135,26 +131,24 @@ class DocumentsTable: doc_content = json.loads(doc.content if doc.content else "{}") doc_content = {**doc_content, **updated} - query = Document.update( - content=json.dumps(doc_content), - timestamp=int(time.time()), - ).where(Document.name == name) - query.execute() - - doc = Document.get(Document.name == name) - return DocumentModel(**model_to_dict(doc)) + Session.query(Document).filter_by(name=name).update( + { + "content": json.dumps(doc_content), + "timestamp": int(time.time()), + } + ) + Session.commit() + return self.get_doc_by_name(name) except Exception as e: log.exception(e) return None def delete_doc_by_name(self, name: str) -> bool: try: - query = Document.delete().where((Document.name == name)) - query.execute() # Remove the rows, return number of rows removed. - + Session.query(Document).filter_by(name=name).delete() return True except: return False -Documents = DocumentsTable(DB) +Documents = DocumentsTable() diff --git a/backend/apps/webui/models/files.py b/backend/apps/webui/models/files.py index 6459ad7250..ce904215d9 100644 --- a/backend/apps/webui/models/files.py +++ b/backend/apps/webui/models/files.py @@ -1,10 +1,11 @@ -from pydantic import BaseModel -from peewee import * -from playhouse.shortcuts import model_to_dict +from pydantic import BaseModel, ConfigDict from typing import List, Union, Optional import time import logging -from apps.webui.internal.db import DB, JSONField + +from sqlalchemy import Column, String, BigInteger, Text + +from apps.webui.internal.db import JSONField, Base, Session import json @@ -18,15 +19,14 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"]) #################### -class File(Model): - id = CharField(unique=True) - user_id = CharField() - filename = TextField() - meta = JSONField() - created_at = BigIntegerField() +class File(Base): + __tablename__ = "file" - class Meta: - database = DB + id = Column(String, primary_key=True) + user_id = Column(String) + filename = Column(Text) + meta = Column(JSONField) + created_at = Column(BigInteger) class FileModel(BaseModel): @@ -36,6 +36,8 @@ class FileModel(BaseModel): meta: dict created_at: int # timestamp in epoch + model_config = ConfigDict(from_attributes=True) + #################### # Forms @@ -57,9 +59,6 @@ class FileForm(BaseModel): class FilesTable: - def __init__(self, db): - self.db = db - self.db.create_tables([File]) def insert_new_file(self, user_id: str, form_data: FileForm) -> Optional[FileModel]: file = FileModel( @@ -71,9 +70,12 @@ class FilesTable: ) try: - result = File.create(**file.model_dump()) + result = File(**file.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return file + return FileModel.model_validate(result) else: return None except Exception as e: @@ -82,31 +84,27 @@ class FilesTable: def get_file_by_id(self, id: str) -> Optional[FileModel]: try: - file = File.get(File.id == id) - return FileModel(**model_to_dict(file)) + file = Session.get(File, id) + return FileModel.model_validate(file) except: return None def get_files(self) -> List[FileModel]: - return [FileModel(**model_to_dict(file)) for file in File.select()] + 
return [FileModel.model_validate(file) for file in Session.query(File).all()] def delete_file_by_id(self, id: str) -> bool: try: - query = File.delete().where((File.id == id)) - query.execute() # Remove the rows, return number of rows removed. - + Session.query(File).filter_by(id=id).delete() return True except: return False def delete_all_files(self) -> bool: try: - query = File.delete() - query.execute() # Remove the rows, return number of rows removed. - + Session.query(File).delete() return True except: return False -Files = FilesTable(DB) +Files = FilesTable() diff --git a/backend/apps/webui/models/functions.py b/backend/apps/webui/models/functions.py index 677f022f6e..7e3ac92cd8 100644 --- a/backend/apps/webui/models/functions.py +++ b/backend/apps/webui/models/functions.py @@ -1,10 +1,11 @@ -from pydantic import BaseModel -from peewee import * -from playhouse.shortcuts import model_to_dict +from pydantic import BaseModel, ConfigDict from typing import List, Union, Optional import time import logging -from apps.webui.internal.db import DB, JSONField + +from sqlalchemy import Column, String, Text, BigInteger, Boolean + +from apps.webui.internal.db import JSONField, Base, Session from apps.webui.models.users import Users import json @@ -21,21 +22,20 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"]) #################### -class Function(Model): - id = CharField(unique=True) - user_id = CharField() - name = TextField() - type = TextField() - content = TextField() - meta = JSONField() - valves = JSONField() - is_active = BooleanField(default=False) - is_global = BooleanField(default=False) - updated_at = BigIntegerField() - created_at = BigIntegerField() +class Function(Base): + __tablename__ = "function" - class Meta: - database = DB + id = Column(String, primary_key=True) + user_id = Column(String) + name = Column(Text) + type = Column(Text) + content = Column(Text) + meta = Column(JSONField) + valves = Column(JSONField) + is_active = Column(Boolean) + is_global = Column(Boolean) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) class FunctionMeta(BaseModel): @@ -55,6 +55,8 @@ class FunctionModel(BaseModel): updated_at: int # timestamp in epoch created_at: int # timestamp in epoch + model_config = ConfigDict(from_attributes=True) + #################### # Forms @@ -85,9 +87,6 @@ class FunctionValves(BaseModel): class FunctionsTable: - def __init__(self, db): - self.db = db - self.db.create_tables([Function]) def insert_new_function( self, user_id: str, type: str, form_data: FunctionForm @@ -103,9 +102,12 @@ class FunctionsTable: ) try: - result = Function.create(**function.model_dump()) + result = Function(**function.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return function + return FunctionModel.model_validate(result) else: return None except Exception as e: @@ -114,21 +116,21 @@ class FunctionsTable: def get_function_by_id(self, id: str) -> Optional[FunctionModel]: try: - function = Function.get(Function.id == id) - return FunctionModel(**model_to_dict(function)) + function = Session.get(Function, id) + return FunctionModel.model_validate(function) except: return None def get_functions(self, active_only=False) -> List[FunctionModel]: if active_only: return [ - FunctionModel(**model_to_dict(function)) - for function in Function.select().where(Function.is_active == True) + FunctionModel.model_validate(function) + for function in Session.query(Function).filter_by(is_active=True).all() ] else: return [ - 
FunctionModel(**model_to_dict(function)) - for function in Function.select() + FunctionModel.model_validate(function) + for function in Session.query(Function).all() ] def get_functions_by_type( @@ -136,15 +138,15 @@ class FunctionsTable: ) -> List[FunctionModel]: if active_only: return [ - FunctionModel(**model_to_dict(function)) - for function in Function.select().where( - Function.type == type, Function.is_active == True - ) + FunctionModel.model_validate(function) + for function in Session.query(Function) + .filter_by(type=type, is_active=True) + .all() ] else: return [ - FunctionModel(**model_to_dict(function)) - for function in Function.select().where(Function.type == type) + FunctionModel.model_validate(function) + for function in Session.query(Function).filter_by(type=type).all() ] def get_global_filter_functions(self) -> List[FunctionModel]: @@ -159,7 +161,7 @@ class FunctionsTable: def get_function_valves_by_id(self, id: str) -> Optional[dict]: try: - function = Function.get(Function.id == id) + function = Session.get(Function, id) return function.valves if function.valves else {} except Exception as e: print(f"An error occurred: {e}") @@ -169,14 +171,12 @@ class FunctionsTable: self, id: str, valves: dict ) -> Optional[FunctionValves]: try: - query = Function.update( - **{"valves": valves}, - updated_at=int(time.time()), - ).where(Function.id == id) - query.execute() - - function = Function.get(Function.id == id) - return FunctionValves(**model_to_dict(function)) + function = Session.get(Function, id) + function.valves = valves + function.updated_at = int(time.time()) + Session.commit() + Session.refresh(function) + return self.get_function_by_id(id) except: return None @@ -223,38 +223,36 @@ class FunctionsTable: def update_function_by_id(self, id: str, updated: dict) -> Optional[FunctionModel]: try: - query = Function.update( - **updated, - updated_at=int(time.time()), - ).where(Function.id == id) - query.execute() - - function = Function.get(Function.id == id) - return FunctionModel(**model_to_dict(function)) + Session.query(Function).filter_by(id=id).update( + { + **updated, + "updated_at": int(time.time()), + } + ) + Session.commit() + return self.get_function_by_id(id) except: return None def deactivate_all_functions(self) -> Optional[bool]: try: - query = Function.update( - **{"is_active": False}, - updated_at=int(time.time()), + Session.query(Function).update( + { + "is_active": False, + "updated_at": int(time.time()), + } ) - - query.execute() - + Session.commit() return True except: return None def delete_function_by_id(self, id: str) -> bool: try: - query = Function.delete().where((Function.id == id)) - query.execute() # Remove the rows, return number of rows removed. 
- + Session.query(Function).filter_by(id=id).delete() return True except: return False -Functions = FunctionsTable(DB) +Functions = FunctionsTable() diff --git a/backend/apps/webui/models/memories.py b/backend/apps/webui/models/memories.py index ef63674abb..1f03318fd1 100644 --- a/backend/apps/webui/models/memories.py +++ b/backend/apps/webui/models/memories.py @@ -1,10 +1,9 @@ -from pydantic import BaseModel -from peewee import * -from playhouse.shortcuts import model_to_dict +from pydantic import BaseModel, ConfigDict from typing import List, Union, Optional -from apps.webui.internal.db import DB -from apps.webui.models.chats import Chats +from sqlalchemy import Column, String, BigInteger, Text + +from apps.webui.internal.db import Base, Session import time import uuid @@ -14,15 +13,14 @@ import uuid #################### -class Memory(Model): - id = CharField(unique=True) - user_id = CharField() - content = TextField() - updated_at = BigIntegerField() - created_at = BigIntegerField() +class Memory(Base): + __tablename__ = "memory" - class Meta: - database = DB + id = Column(String, primary_key=True) + user_id = Column(String) + content = Column(Text) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) class MemoryModel(BaseModel): @@ -32,6 +30,8 @@ class MemoryModel(BaseModel): updated_at: int # timestamp in epoch created_at: int # timestamp in epoch + model_config = ConfigDict(from_attributes=True) + #################### # Forms @@ -39,9 +39,6 @@ class MemoryModel(BaseModel): class MemoriesTable: - def __init__(self, db): - self.db = db - self.db.create_tables([Memory]) def insert_new_memory( self, @@ -59,9 +56,12 @@ class MemoriesTable: "updated_at": int(time.time()), } ) - result = Memory.create(**memory.model_dump()) + result = Memory(**memory.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return memory + return MemoryModel.model_validate(result) else: return None @@ -71,40 +71,38 @@ class MemoriesTable: content: str, ) -> Optional[MemoryModel]: try: - memory = Memory.get(Memory.id == id) - memory.content = content - memory.updated_at = int(time.time()) - memory.save() - return MemoryModel(**model_to_dict(memory)) + Session.query(Memory).filter_by(id=id).update( + {"content": content, "updated_at": int(time.time())} + ) + Session.commit() + return self.get_memory_by_id(id) except: return None def get_memories(self) -> List[MemoryModel]: try: - memories = Memory.select() - return [MemoryModel(**model_to_dict(memory)) for memory in memories] + memories = Session.query(Memory).all() + return [MemoryModel.model_validate(memory) for memory in memories] except: return None def get_memories_by_user_id(self, user_id: str) -> List[MemoryModel]: try: - memories = Memory.select().where(Memory.user_id == user_id) - return [MemoryModel(**model_to_dict(memory)) for memory in memories] + memories = Session.query(Memory).filter_by(user_id=user_id).all() + return [MemoryModel.model_validate(memory) for memory in memories] except: return None - def get_memory_by_id(self, id) -> Optional[MemoryModel]: + def get_memory_by_id(self, id: str) -> Optional[MemoryModel]: try: - memory = Memory.get(Memory.id == id) - return MemoryModel(**model_to_dict(memory)) + memory = Session.get(Memory, id) + return MemoryModel.model_validate(memory) except: return None def delete_memory_by_id(self, id: str) -> bool: try: - query = Memory.delete().where(Memory.id == id) - query.execute() # Remove the rows, return number of rows removed. 
- + Session.query(Memory).filter_by(id=id).delete() return True except: @@ -112,21 +110,17 @@ class MemoriesTable: def delete_memories_by_user_id(self, user_id: str) -> bool: try: - query = Memory.delete().where(Memory.user_id == user_id) - query.execute() - + Session.query(Memory).filter_by(user_id=user_id).delete() return True except: return False def delete_memory_by_id_and_user_id(self, id: str, user_id: str) -> bool: try: - query = Memory.delete().where(Memory.id == id, Memory.user_id == user_id) - query.execute() - + Session.query(Memory).filter_by(id=id, user_id=user_id).delete() return True except: return False -Memories = MemoriesTable(DB) +Memories = MemoriesTable() diff --git a/backend/apps/webui/models/models.py b/backend/apps/webui/models/models.py index 8513523989..6543edefcb 100644 --- a/backend/apps/webui/models/models.py +++ b/backend/apps/webui/models/models.py @@ -2,13 +2,10 @@ import json import logging from typing import Optional -import peewee as pw -from peewee import * - -from playhouse.shortcuts import model_to_dict from pydantic import BaseModel, ConfigDict +from sqlalchemy import String, Column, BigInteger, Text -from apps.webui.internal.db import DB, JSONField +from apps.webui.internal.db import Base, JSONField, Session from typing import List, Union, Optional from config import SRC_LOG_LEVELS @@ -46,38 +43,37 @@ class ModelMeta(BaseModel): pass -class Model(pw.Model): - id = pw.TextField(unique=True) +class Model(Base): + __tablename__ = "model" + + id = Column(Text, primary_key=True) """ The model's id as used in the API. If set to an existing model, it will override the model. """ - user_id = pw.TextField() + user_id = Column(Text) - base_model_id = pw.TextField(null=True) + base_model_id = Column(Text, nullable=True) """ An optional pointer to the actual model that should be used when proxying requests. """ - name = pw.TextField() + name = Column(Text) """ The human-readable display name of the model. """ - params = JSONField() + params = Column(JSONField) """ Holds a JSON encoded blob of parameters, see `ModelParams`. """ - meta = JSONField() + meta = Column(JSONField) """ Holds a JSON encoded blob of metadata, see `ModelMeta`. 
""" - updated_at = BigIntegerField() - created_at = BigIntegerField() - - class Meta: - database = DB + updated_at = Column(BigInteger) + created_at = Column(BigInteger) class ModelModel(BaseModel): @@ -92,6 +88,8 @@ class ModelModel(BaseModel): updated_at: int # timestamp in epoch created_at: int # timestamp in epoch + model_config = ConfigDict(from_attributes=True) + #################### # Forms @@ -115,12 +113,6 @@ class ModelForm(BaseModel): class ModelsTable: - def __init__( - self, - db: pw.SqliteDatabase | pw.PostgresqlDatabase, - ): - self.db = db - self.db.create_tables([Model]) def insert_new_model( self, form_data: ModelForm, user_id: str @@ -134,10 +126,13 @@ class ModelsTable: } ) try: - result = Model.create(**model.model_dump()) + result = Model(**model.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return model + return ModelModel.model_validate(result) else: return None except Exception as e: @@ -145,23 +140,25 @@ class ModelsTable: return None def get_all_models(self) -> List[ModelModel]: - return [ModelModel(**model_to_dict(model)) for model in Model.select()] + return [ + ModelModel.model_validate(model) for model in Session.query(Model).all() + ] def get_model_by_id(self, id: str) -> Optional[ModelModel]: try: - model = Model.get(Model.id == id) - return ModelModel(**model_to_dict(model)) + model = Session.get(Model, id) + return ModelModel.model_validate(model) except: return None def update_model_by_id(self, id: str, model: ModelForm) -> Optional[ModelModel]: try: # update only the fields that are present in the model - query = Model.update(**model.model_dump()).where(Model.id == id) - query.execute() - - model = Model.get(Model.id == id) - return ModelModel(**model_to_dict(model)) + model = Session.query(Model).get(id) + model.update(**model.model_dump()) + Session.commit() + Session.refresh(model) + return ModelModel.model_validate(model) except Exception as e: print(e) @@ -169,11 +166,10 @@ class ModelsTable: def delete_model_by_id(self, id: str) -> bool: try: - query = Model.delete().where(Model.id == id) - query.execute() + Session.query(Model).filter_by(id=id).delete() return True except: return False -Models = ModelsTable(DB) +Models = ModelsTable() diff --git a/backend/apps/webui/models/prompts.py b/backend/apps/webui/models/prompts.py index c4ac6be149..ab8cc04ce8 100644 --- a/backend/apps/webui/models/prompts.py +++ b/backend/apps/webui/models/prompts.py @@ -1,13 +1,10 @@ -from pydantic import BaseModel -from peewee import * -from playhouse.shortcuts import model_to_dict -from typing import List, Union, Optional +from pydantic import BaseModel, ConfigDict +from typing import List, Optional import time -from utils.utils import decode_token -from utils.misc import get_gravatar_url +from sqlalchemy import String, Column, BigInteger, Text -from apps.webui.internal.db import DB +from apps.webui.internal.db import Base, Session import json @@ -16,15 +13,14 @@ import json #################### -class Prompt(Model): - command = CharField(unique=True) - user_id = CharField() - title = TextField() - content = TextField() - timestamp = BigIntegerField() +class Prompt(Base): + __tablename__ = "prompt" - class Meta: - database = DB + command = Column(String, primary_key=True) + user_id = Column(String) + title = Column(Text) + content = Column(Text) + timestamp = Column(BigInteger) class PromptModel(BaseModel): @@ -34,6 +30,8 @@ class PromptModel(BaseModel): content: str timestamp: int # timestamp in epoch + model_config = 
ConfigDict(from_attributes=True) + #################### # Forms @@ -48,10 +46,6 @@ class PromptForm(BaseModel): class PromptsTable: - def __init__(self, db): - self.db = db - self.db.create_tables([Prompt]) - def insert_new_prompt( self, user_id: str, form_data: PromptForm ) -> Optional[PromptModel]: @@ -66,53 +60,48 @@ class PromptsTable: ) try: - result = Prompt.create(**prompt.model_dump()) + result = Prompt(**prompt.dict()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return prompt + return PromptModel.model_validate(result) else: return None - except: + except Exception as e: return None def get_prompt_by_command(self, command: str) -> Optional[PromptModel]: try: - prompt = Prompt.get(Prompt.command == command) - return PromptModel(**model_to_dict(prompt)) + prompt = Session.query(Prompt).filter_by(command=command).first() + return PromptModel.model_validate(prompt) except: return None def get_prompts(self) -> List[PromptModel]: return [ - PromptModel(**model_to_dict(prompt)) - for prompt in Prompt.select() - # .limit(limit).offset(skip) + PromptModel.model_validate(prompt) for prompt in Session.query(Prompt).all() ] def update_prompt_by_command( self, command: str, form_data: PromptForm ) -> Optional[PromptModel]: try: - query = Prompt.update( - title=form_data.title, - content=form_data.content, - timestamp=int(time.time()), - ).where(Prompt.command == command) - - query.execute() - - prompt = Prompt.get(Prompt.command == command) - return PromptModel(**model_to_dict(prompt)) + prompt = Session.query(Prompt).filter_by(command=command).first() + prompt.title = form_data.title + prompt.content = form_data.content + prompt.timestamp = int(time.time()) + Session.commit() + return PromptModel.model_validate(prompt) except: return None def delete_prompt_by_command(self, command: str) -> bool: try: - query = Prompt.delete().where((Prompt.command == command)) - query.execute() # Remove the rows, return number of rows removed. 
- + Session.query(Prompt).filter_by(command=command).delete() return True except: return False -Prompts = PromptsTable(DB) +Prompts = PromptsTable() diff --git a/backend/apps/webui/models/tags.py b/backend/apps/webui/models/tags.py index 4c4fa82e68..7b0df6b6bc 100644 --- a/backend/apps/webui/models/tags.py +++ b/backend/apps/webui/models/tags.py @@ -1,14 +1,14 @@ -from pydantic import BaseModel -from typing import List, Union, Optional -from peewee import * -from playhouse.shortcuts import model_to_dict +from pydantic import BaseModel, ConfigDict +from typing import List, Optional import json import uuid import time import logging -from apps.webui.internal.db import DB +from sqlalchemy import String, Column, BigInteger, Text + +from apps.webui.internal.db import Base, Session from config import SRC_LOG_LEVELS @@ -20,25 +20,23 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"]) #################### -class Tag(Model): - id = CharField(unique=True) - name = CharField() - user_id = CharField() - data = TextField(null=True) +class Tag(Base): + __tablename__ = "tag" - class Meta: - database = DB + id = Column(String, primary_key=True) + name = Column(String) + user_id = Column(String) + data = Column(Text, nullable=True) -class ChatIdTag(Model): - id = CharField(unique=True) - tag_name = CharField() - chat_id = CharField() - user_id = CharField() - timestamp = BigIntegerField() +class ChatIdTag(Base): + __tablename__ = "chatidtag" - class Meta: - database = DB + id = Column(String, primary_key=True) + tag_name = Column(String) + chat_id = Column(String) + user_id = Column(String) + timestamp = Column(BigInteger) class TagModel(BaseModel): @@ -47,6 +45,8 @@ class TagModel(BaseModel): user_id: str data: Optional[str] = None + model_config = ConfigDict(from_attributes=True) + class ChatIdTagModel(BaseModel): id: str @@ -55,6 +55,8 @@ class ChatIdTagModel(BaseModel): user_id: str timestamp: int + model_config = ConfigDict(from_attributes=True) + #################### # Forms @@ -75,17 +77,17 @@ class ChatTagsResponse(BaseModel): class TagTable: - def __init__(self, db): - self.db = db - db.create_tables([Tag, ChatIdTag]) def insert_new_tag(self, name: str, user_id: str) -> Optional[TagModel]: id = str(uuid.uuid4()) tag = TagModel(**{"id": id, "user_id": user_id, "name": name}) try: - result = Tag.create(**tag.model_dump()) + result = Tag(**tag.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return tag + return TagModel.model_validate(result) else: return None except Exception as e: @@ -95,8 +97,8 @@ class TagTable: self, name: str, user_id: str ) -> Optional[TagModel]: try: - tag = Tag.get(Tag.name == name, Tag.user_id == user_id) - return TagModel(**model_to_dict(tag)) + tag = Session.query(Tag).filter(name=name, user_id=user_id).first() + return TagModel.model_validate(tag) except Exception as e: return None @@ -118,9 +120,12 @@ class TagTable: } ) try: - result = ChatIdTag.create(**chatIdTag.model_dump()) + result = ChatIdTag(**chatIdTag.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return chatIdTag + return ChatIdTagModel.model_validate(result) else: return None except: @@ -128,71 +133,84 @@ class TagTable: def get_tags_by_user_id(self, user_id: str) -> List[TagModel]: tag_names = [ - ChatIdTagModel(**model_to_dict(chat_id_tag)).tag_name - for chat_id_tag in ChatIdTag.select() - .where(ChatIdTag.user_id == user_id) - .order_by(ChatIdTag.timestamp.desc()) + chat_id_tag.tag_name + for chat_id_tag in ( + 
Session.query(ChatIdTag) + .filter_by(user_id=user_id) + .order_by(ChatIdTag.timestamp.desc()) + .all() + ) ] return [ - TagModel(**model_to_dict(tag)) - for tag in Tag.select() - .where(Tag.user_id == user_id) - .where(Tag.name.in_(tag_names)) + TagModel.model_validate(tag) + for tag in ( + Session.query(Tag) + .filter_by(user_id=user_id) + .filter(Tag.name.in_(tag_names)) + .all() + ) ] def get_tags_by_chat_id_and_user_id( self, chat_id: str, user_id: str ) -> List[TagModel]: tag_names = [ - ChatIdTagModel(**model_to_dict(chat_id_tag)).tag_name - for chat_id_tag in ChatIdTag.select() - .where((ChatIdTag.user_id == user_id) & (ChatIdTag.chat_id == chat_id)) - .order_by(ChatIdTag.timestamp.desc()) + chat_id_tag.tag_name + for chat_id_tag in ( + Session.query(ChatIdTag) + .filter_by(user_id=user_id, chat_id=chat_id) + .order_by(ChatIdTag.timestamp.desc()) + .all() + ) ] return [ - TagModel(**model_to_dict(tag)) - for tag in Tag.select() - .where(Tag.user_id == user_id) - .where(Tag.name.in_(tag_names)) + TagModel.model_validate(tag) + for tag in ( + Session.query(Tag) + .filter_by(user_id=user_id) + .filter(Tag.name.in_(tag_names)) + .all() + ) ] def get_chat_ids_by_tag_name_and_user_id( self, tag_name: str, user_id: str - ) -> Optional[ChatIdTagModel]: + ) -> List[ChatIdTagModel]: return [ - ChatIdTagModel(**model_to_dict(chat_id_tag)) - for chat_id_tag in ChatIdTag.select() - .where((ChatIdTag.user_id == user_id) & (ChatIdTag.tag_name == tag_name)) - .order_by(ChatIdTag.timestamp.desc()) + ChatIdTagModel.model_validate(chat_id_tag) + for chat_id_tag in ( + Session.query(ChatIdTag) + .filter_by(user_id=user_id, tag_name=tag_name) + .order_by(ChatIdTag.timestamp.desc()) + .all() + ) ] def count_chat_ids_by_tag_name_and_user_id( self, tag_name: str, user_id: str ) -> int: return ( - ChatIdTag.select() - .where((ChatIdTag.tag_name == tag_name) & (ChatIdTag.user_id == user_id)) + Session.query(ChatIdTag) + .filter_by(tag_name=tag_name, user_id=user_id) .count() ) def delete_tag_by_tag_name_and_user_id(self, tag_name: str, user_id: str) -> bool: try: - query = ChatIdTag.delete().where( - (ChatIdTag.tag_name == tag_name) & (ChatIdTag.user_id == user_id) + res = ( + Session.query(ChatIdTag) + .filter_by(tag_name=tag_name, user_id=user_id) + .delete() ) - res = query.execute() # Remove the rows, return number of rows removed. log.debug(f"res: {res}") + Session.commit() tag_count = self.count_chat_ids_by_tag_name_and_user_id(tag_name, user_id) if tag_count == 0: # Remove tag item from Tag col as well - query = Tag.delete().where( - (Tag.name == tag_name) & (Tag.user_id == user_id) - ) - query.execute() # Remove the rows, return number of rows removed. - + Session.query(Tag).filter_by(name=tag_name, user_id=user_id).delete() return True except Exception as e: log.error(f"delete_tag: {e}") @@ -202,21 +220,18 @@ class TagTable: self, tag_name: str, chat_id: str, user_id: str ) -> bool: try: - query = ChatIdTag.delete().where( - (ChatIdTag.tag_name == tag_name) - & (ChatIdTag.chat_id == chat_id) - & (ChatIdTag.user_id == user_id) + res = ( + Session.query(ChatIdTag) + .filter_by(tag_name=tag_name, chat_id=chat_id, user_id=user_id) + .delete() ) - res = query.execute() # Remove the rows, return number of rows removed. 
log.debug(f"res: {res}") + Session.commit() tag_count = self.count_chat_ids_by_tag_name_and_user_id(tag_name, user_id) if tag_count == 0: # Remove tag item from Tag col as well - query = Tag.delete().where( - (Tag.name == tag_name) & (Tag.user_id == user_id) - ) - query.execute() # Remove the rows, return number of rows removed. + Session.query(Tag).filter_by(name=tag_name, user_id=user_id).delete() return True except Exception as e: @@ -234,4 +249,4 @@ class TagTable: return True -Tags = TagTable(DB) +Tags = TagTable() diff --git a/backend/apps/webui/models/tools.py b/backend/apps/webui/models/tools.py index 950972c2d8..f5df106371 100644 --- a/backend/apps/webui/models/tools.py +++ b/backend/apps/webui/models/tools.py @@ -1,10 +1,10 @@ -from pydantic import BaseModel -from peewee import * -from playhouse.shortcuts import model_to_dict -from typing import List, Union, Optional +from pydantic import BaseModel, ConfigDict +from typing import List, Optional import time import logging -from apps.webui.internal.db import DB, JSONField +from sqlalchemy import String, Column, BigInteger, Text + +from apps.webui.internal.db import Base, JSONField, Session from apps.webui.models.users import Users import json @@ -21,19 +21,18 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"]) #################### -class Tool(Model): - id = CharField(unique=True) - user_id = CharField() - name = TextField() - content = TextField() - specs = JSONField() - meta = JSONField() - valves = JSONField() - updated_at = BigIntegerField() - created_at = BigIntegerField() +class Tool(Base): + __tablename__ = "tool" - class Meta: - database = DB + id = Column(String, primary_key=True) + user_id = Column(String) + name = Column(Text) + content = Column(Text) + specs = Column(JSONField) + meta = Column(JSONField) + valves = Column(JSONField) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) class ToolMeta(BaseModel): @@ -51,6 +50,8 @@ class ToolModel(BaseModel): updated_at: int # timestamp in epoch created_at: int # timestamp in epoch + model_config = ConfigDict(from_attributes=True) + #################### # Forms @@ -78,9 +79,6 @@ class ToolValves(BaseModel): class ToolsTable: - def __init__(self, db): - self.db = db - self.db.create_tables([Tool]) def insert_new_tool( self, user_id: str, form_data: ToolForm, specs: List[dict] @@ -96,9 +94,12 @@ class ToolsTable: ) try: - result = Tool.create(**tool.model_dump()) + result = Tool(**tool.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: - return tool + return ToolModel.model_validate(result) else: return None except Exception as e: @@ -107,17 +108,17 @@ class ToolsTable: def get_tool_by_id(self, id: str) -> Optional[ToolModel]: try: - tool = Tool.get(Tool.id == id) - return ToolModel(**model_to_dict(tool)) + tool = Session.get(Tool, id) + return ToolModel.model_validate(tool) except: return None def get_tools(self) -> List[ToolModel]: - return [ToolModel(**model_to_dict(tool)) for tool in Tool.select()] + return [ToolModel.model_validate(tool) for tool in Session.query(Tool).all()] def get_tool_valves_by_id(self, id: str) -> Optional[dict]: try: - tool = Tool.get(Tool.id == id) + tool = Session.get(Tool, id) return tool.valves if tool.valves else {} except Exception as e: print(f"An error occurred: {e}") @@ -125,14 +126,11 @@ class ToolsTable: def update_tool_valves_by_id(self, id: str, valves: dict) -> Optional[ToolValves]: try: - query = Tool.update( - **{"valves": valves}, - updated_at=int(time.time()), - ).where(Tool.id == id) 
- query.execute() - - tool = Tool.get(Tool.id == id) - return ToolValves(**model_to_dict(tool)) + Session.query(Tool).filter_by(id=id).update( + {"valves": valves, "updated_at": int(time.time())} + ) + Session.commit() + return self.get_tool_by_id(id) except: return None @@ -179,25 +177,21 @@ class ToolsTable: def update_tool_by_id(self, id: str, updated: dict) -> Optional[ToolModel]: try: - query = Tool.update( - **updated, - updated_at=int(time.time()), - ).where(Tool.id == id) - query.execute() - - tool = Tool.get(Tool.id == id) - return ToolModel(**model_to_dict(tool)) + tool = Session.get(Tool, id) + tool.update(**updated) + tool.updated_at = int(time.time()) + Session.commit() + Session.refresh(tool) + return ToolModel.model_validate(tool) except: return None def delete_tool_by_id(self, id: str) -> bool: try: - query = Tool.delete().where((Tool.id == id)) - query.execute() # Remove the rows, return number of rows removed. - + Session.query(Tool).filter_by(id=id).delete() return True except: return False -Tools = ToolsTable(DB) +Tools = ToolsTable() diff --git a/backend/apps/webui/models/users.py b/backend/apps/webui/models/users.py index e3e1842b83..9e1e25ac67 100644 --- a/backend/apps/webui/models/users.py +++ b/backend/apps/webui/models/users.py @@ -1,11 +1,12 @@ -from pydantic import BaseModel, ConfigDict -from peewee import * -from playhouse.shortcuts import model_to_dict +from pydantic import BaseModel, ConfigDict, parse_obj_as from typing import List, Union, Optional import time + +from sqlalchemy import String, Column, BigInteger, Text + from utils.misc import get_gravatar_url -from apps.webui.internal.db import DB, JSONField +from apps.webui.internal.db import Base, JSONField, Session from apps.webui.models.chats import Chats #################### @@ -13,25 +14,24 @@ from apps.webui.models.chats import Chats #################### -class User(Model): - id = CharField(unique=True) - name = CharField() - email = CharField() - role = CharField() - profile_image_url = TextField() +class User(Base): + __tablename__ = "user" - last_active_at = BigIntegerField() - updated_at = BigIntegerField() - created_at = BigIntegerField() + id = Column(String, primary_key=True) + name = Column(String) + email = Column(String) + role = Column(String) + profile_image_url = Column(Text) - api_key = CharField(null=True, unique=True) - settings = JSONField(null=True) - info = JSONField(null=True) + last_active_at = Column(BigInteger) + updated_at = Column(BigInteger) + created_at = Column(BigInteger) - oauth_sub = TextField(null=True, unique=True) + api_key = Column(String, nullable=True, unique=True) + settings = Column(JSONField, nullable=True) + info = Column(JSONField, nullable=True) - class Meta: - database = DB + oauth_sub = Column(Text, unique=True) class UserSettings(BaseModel): @@ -57,6 +57,8 @@ class UserModel(BaseModel): oauth_sub: Optional[str] = None + model_config = ConfigDict(from_attributes=True) + #################### # Forms @@ -76,9 +78,6 @@ class UserUpdateForm(BaseModel): class UsersTable: - def __init__(self, db): - self.db = db - self.db.create_tables([User]) def insert_new_user( self, @@ -102,7 +101,10 @@ class UsersTable: "oauth_sub": oauth_sub, } ) - result = User.create(**user.model_dump()) + result = User(**user.model_dump()) + Session.add(result) + Session.commit() + Session.refresh(result) if result: return user else: @@ -110,56 +112,57 @@ class UsersTable: def get_user_by_id(self, id: str) -> Optional[UserModel]: try: - user = User.get(User.id == id) - return 
UserModel(**model_to_dict(user)) - except: + user = Session.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + except Exception as e: return None def get_user_by_api_key(self, api_key: str) -> Optional[UserModel]: try: - user = User.get(User.api_key == api_key) - return UserModel(**model_to_dict(user)) + user = Session.query(User).filter_by(api_key=api_key).first() + return UserModel.model_validate(user) except: return None def get_user_by_email(self, email: str) -> Optional[UserModel]: try: - user = User.get(User.email == email) - return UserModel(**model_to_dict(user)) + user = Session.query(User).filter_by(email=email).first() + return UserModel.model_validate(user) except: return None def get_user_by_oauth_sub(self, sub: str) -> Optional[UserModel]: try: - user = User.get(User.oauth_sub == sub) - return UserModel(**model_to_dict(user)) + user = Session.query(User).filter_by(oauth_sub=sub).first() + return UserModel.model_validate(user) except: return None def get_users(self, skip: int = 0, limit: int = 50) -> List[UserModel]: - return [ - UserModel(**model_to_dict(user)) - for user in User.select() - # .limit(limit).offset(skip) - ] + users = ( + Session.query(User) + # .offset(skip).limit(limit) + .all() + ) + return [UserModel.model_validate(user) for user in users] def get_num_users(self) -> Optional[int]: - return User.select().count() + return Session.query(User).count() def get_first_user(self) -> UserModel: try: - user = User.select().order_by(User.created_at).first() - return UserModel(**model_to_dict(user)) + user = Session.query(User).order_by(User.created_at).first() + return UserModel.model_validate(user) except: return None def update_user_role_by_id(self, id: str, role: str) -> Optional[UserModel]: try: - query = User.update(role=role).where(User.id == id) - query.execute() + Session.query(User).filter_by(id=id).update({"role": role}) + Session.commit() - user = User.get(User.id == id) - return UserModel(**model_to_dict(user)) + user = Session.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) except: return None @@ -167,23 +170,25 @@ class UsersTable: self, id: str, profile_image_url: str ) -> Optional[UserModel]: try: - query = User.update(profile_image_url=profile_image_url).where( - User.id == id + Session.query(User).filter_by(id=id).update( + {"profile_image_url": profile_image_url} ) - query.execute() + Session.commit() - user = User.get(User.id == id) - return UserModel(**model_to_dict(user)) + user = Session.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) except: return None def update_user_last_active_by_id(self, id: str) -> Optional[UserModel]: try: - query = User.update(last_active_at=int(time.time())).where(User.id == id) - query.execute() + Session.query(User).filter_by(id=id).update( + {"last_active_at": int(time.time())} + ) + Session.commit() - user = User.get(User.id == id) - return UserModel(**model_to_dict(user)) + user = Session.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) except: return None @@ -191,22 +196,22 @@ class UsersTable: self, id: str, oauth_sub: str ) -> Optional[UserModel]: try: - query = User.update(oauth_sub=oauth_sub).where(User.id == id) - query.execute() + Session.query(User).filter_by(id=id).update({"oauth_sub": oauth_sub}) - user = User.get(User.id == id) - return UserModel(**model_to_dict(user)) + user = Session.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) except: return None def 
update_user_by_id(self, id: str, updated: dict) -> Optional[UserModel]: try: - query = User.update(**updated).where(User.id == id) - query.execute() + Session.query(User).filter_by(id=id).update(updated) + Session.commit() - user = User.get(User.id == id) - return UserModel(**model_to_dict(user)) - except: + user = Session.query(User).filter_by(id=id).first() + return UserModel.model_validate(user) + # return UserModel(**user.dict()) + except Exception as e: return None def delete_user_by_id(self, id: str) -> bool: @@ -216,8 +221,8 @@ class UsersTable: if result: # Delete User - query = User.delete().where(User.id == id) - query.execute() # Remove the rows, return number of rows removed. + Session.query(User).filter_by(id=id).delete() + Session.commit() return True else: @@ -227,19 +232,18 @@ class UsersTable: def update_user_api_key_by_id(self, id: str, api_key: str) -> str: try: - query = User.update(api_key=api_key).where(User.id == id) - result = query.execute() - + result = Session.query(User).filter_by(id=id).update({"api_key": api_key}) + Session.commit() return True if result == 1 else False except: return False def get_user_api_key_by_id(self, id: str) -> Optional[str]: try: - user = User.get(User.id == id) + user = Session.query(User).filter_by(id=id).first() return user.api_key - except: + except Exception as e: return None -Users = UsersTable(DB) +Users = UsersTable() diff --git a/backend/apps/webui/routers/chats.py b/backend/apps/webui/routers/chats.py index c4d6575c2f..7f80cbda71 100644 --- a/backend/apps/webui/routers/chats.py +++ b/backend/apps/webui/routers/chats.py @@ -76,7 +76,10 @@ async def delete_all_user_chats(request: Request, user=Depends(get_verified_user @router.get("/list/user/{user_id}", response_model=List[ChatTitleIdResponse]) async def get_user_chat_list_by_user_id( - user_id: str, user=Depends(get_admin_user), skip: int = 0, limit: int = 50 + user_id: str, + user=Depends(get_admin_user), + skip: int = 0, + limit: int = 50, ): return Chats.get_chat_list_by_user_id( user_id, include_archived=True, skip=skip, limit=limit @@ -119,7 +122,7 @@ async def get_user_chats(user=Depends(get_verified_user)): @router.get("/all/archived", response_model=List[ChatResponse]) -async def get_user_chats(user=Depends(get_verified_user)): +async def get_user_archived_chats(user=Depends(get_verified_user)): return [ ChatResponse(**{**chat.model_dump(), "chat": json.loads(chat.chat)}) for chat in Chats.get_archived_chats_by_user_id(user.id) diff --git a/backend/apps/webui/routers/documents.py b/backend/apps/webui/routers/documents.py index dc53b52467..2299b2fee3 100644 --- a/backend/apps/webui/routers/documents.py +++ b/backend/apps/webui/routers/documents.py @@ -130,7 +130,9 @@ async def tag_doc_by_name(form_data: TagDocumentForm, user=Depends(get_verified_ @router.post("/doc/update", response_model=Optional[DocumentResponse]) async def update_doc_by_name( - name: str, form_data: DocumentUpdateForm, user=Depends(get_admin_user) + name: str, + form_data: DocumentUpdateForm, + user=Depends(get_admin_user), ): doc = Documents.update_doc_by_name(name, form_data) if doc: diff --git a/backend/apps/webui/routers/files.py b/backend/apps/webui/routers/files.py index 3b6d44aa5b..fffe0743c1 100644 --- a/backend/apps/webui/routers/files.py +++ b/backend/apps/webui/routers/files.py @@ -50,10 +50,7 @@ router = APIRouter() @router.post("/") -def upload_file( - file: UploadFile = File(...), - user=Depends(get_verified_user), -): +def upload_file(file: UploadFile = File(...), 
user=Depends(get_verified_user)): log.info(f"file.content_type: {file.content_type}") try: unsanitized_filename = file.filename diff --git a/backend/apps/webui/routers/memories.py b/backend/apps/webui/routers/memories.py index e9ae961730..2c473ebe8f 100644 --- a/backend/apps/webui/routers/memories.py +++ b/backend/apps/webui/routers/memories.py @@ -50,7 +50,9 @@ class MemoryUpdateModel(BaseModel): @router.post("/add", response_model=Optional[MemoryModel]) async def add_memory( - request: Request, form_data: AddMemoryForm, user=Depends(get_verified_user) + request: Request, + form_data: AddMemoryForm, + user=Depends(get_verified_user), ): memory = Memories.insert_new_memory(user.id, form_data.content) memory_embedding = request.app.state.EMBEDDING_FUNCTION(memory.content) diff --git a/backend/apps/webui/routers/models.py b/backend/apps/webui/routers/models.py index acc1c6b47d..eeae9e1c41 100644 --- a/backend/apps/webui/routers/models.py +++ b/backend/apps/webui/routers/models.py @@ -5,6 +5,7 @@ from typing import List, Union, Optional from fastapi import APIRouter from pydantic import BaseModel import json + from apps.webui.models.models import Models, ModelModel, ModelForm, ModelResponse from utils.utils import get_verified_user, get_admin_user @@ -29,7 +30,9 @@ async def get_models(user=Depends(get_verified_user)): @router.post("/add", response_model=Optional[ModelModel]) async def add_new_model( - request: Request, form_data: ModelForm, user=Depends(get_admin_user) + request: Request, + form_data: ModelForm, + user=Depends(get_admin_user), ): if form_data.id in request.app.state.MODELS: raise HTTPException( @@ -73,7 +76,10 @@ async def get_model_by_id(id: str, user=Depends(get_verified_user)): @router.post("/update", response_model=Optional[ModelModel]) async def update_model_by_id( - request: Request, id: str, form_data: ModelForm, user=Depends(get_admin_user) + request: Request, + id: str, + form_data: ModelForm, + user=Depends(get_admin_user), ): model = Models.get_model_by_id(id) if model: diff --git a/backend/apps/webui/routers/prompts.py b/backend/apps/webui/routers/prompts.py index e609a0a1b3..c674590e95 100644 --- a/backend/apps/webui/routers/prompts.py +++ b/backend/apps/webui/routers/prompts.py @@ -71,7 +71,9 @@ async def get_prompt_by_command(command: str, user=Depends(get_verified_user)): @router.post("/command/{command}/update", response_model=Optional[PromptModel]) async def update_prompt_by_command( - command: str, form_data: PromptForm, user=Depends(get_admin_user) + command: str, + form_data: PromptForm, + user=Depends(get_admin_user), ): prompt = Prompts.update_prompt_by_command(f"/{command}", form_data) if prompt: diff --git a/backend/apps/webui/routers/tools.py b/backend/apps/webui/routers/tools.py index d20584c228..ea9db8180b 100644 --- a/backend/apps/webui/routers/tools.py +++ b/backend/apps/webui/routers/tools.py @@ -6,7 +6,6 @@ from fastapi import APIRouter from pydantic import BaseModel import json - from apps.webui.models.users import Users from apps.webui.models.tools import Tools, ToolForm, ToolModel, ToolResponse from apps.webui.utils import load_toolkit_module_by_id @@ -57,7 +56,9 @@ async def get_toolkits(user=Depends(get_admin_user)): @router.post("/create", response_model=Optional[ToolResponse]) async def create_new_toolkit( - request: Request, form_data: ToolForm, user=Depends(get_admin_user) + request: Request, + form_data: ToolForm, + user=Depends(get_admin_user), ): if not form_data.id.isidentifier(): raise HTTPException( @@ -131,7 +132,10 @@ async 
def get_toolkit_by_id(id: str, user=Depends(get_admin_user)): @router.post("/id/{id}/update", response_model=Optional[ToolModel]) async def update_toolkit_by_id( - request: Request, id: str, form_data: ToolForm, user=Depends(get_admin_user) + request: Request, + id: str, + form_data: ToolForm, + user=Depends(get_admin_user), ): toolkit_path = os.path.join(TOOLS_DIR, f"{id}.py") diff --git a/backend/apps/webui/routers/users.py b/backend/apps/webui/routers/users.py index 270d72a238..9627f0b067 100644 --- a/backend/apps/webui/routers/users.py +++ b/backend/apps/webui/routers/users.py @@ -138,7 +138,7 @@ async def get_user_info_by_session_user(user=Depends(get_verified_user)): @router.post("/user/info/update", response_model=Optional[dict]) -async def update_user_settings_by_session_user( +async def update_user_info_by_session_user( form_data: dict, user=Depends(get_verified_user) ): user = Users.get_user_by_id(user.id) @@ -205,7 +205,9 @@ async def get_user_by_id(user_id: str, user=Depends(get_verified_user)): @router.post("/{user_id}/update", response_model=Optional[UserModel]) async def update_user_by_id( - user_id: str, form_data: UserUpdateForm, session_user=Depends(get_admin_user) + user_id: str, + form_data: UserUpdateForm, + session_user=Depends(get_admin_user), ): user = Users.get_user_by_id(user_id) diff --git a/backend/apps/webui/routers/utils.py b/backend/apps/webui/routers/utils.py index 8f6d663b47..780ed6b43e 100644 --- a/backend/apps/webui/routers/utils.py +++ b/backend/apps/webui/routers/utils.py @@ -1,6 +1,5 @@ from fastapi import APIRouter, UploadFile, File, Response from fastapi import Depends, HTTPException, status -from peewee import SqliteDatabase from starlette.responses import StreamingResponse, FileResponse from pydantic import BaseModel @@ -10,7 +9,6 @@ import markdown import black -from apps.webui.internal.db import DB from utils.utils import get_admin_user from utils.misc import calculate_sha256, get_gravatar_url @@ -114,13 +112,15 @@ async def download_db(user=Depends(get_admin_user)): status_code=status.HTTP_401_UNAUTHORIZED, detail=ERROR_MESSAGES.ACCESS_PROHIBITED, ) - if not isinstance(DB, SqliteDatabase): + from apps.webui.internal.db import engine + + if engine.name != "sqlite": raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DB_NOT_SQLITE, ) return FileResponse( - DB.database, + engine.url.database, media_type="application/octet-stream", filename="webui.db", ) diff --git a/backend/main.py b/backend/main.py index 0e3986f21b..7fd0b4166c 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1,5 +1,6 @@ import base64 import uuid +import subprocess from contextlib import asynccontextmanager from authlib.integrations.starlette_client import OAuth @@ -27,6 +28,7 @@ from fastapi.responses import JSONResponse from fastapi import HTTPException from fastapi.middleware.wsgi import WSGIMiddleware from fastapi.middleware.cors import CORSMiddleware +from sqlalchemy import text from starlette.exceptions import HTTPException as StarletteHTTPException from starlette.middleware.base import BaseHTTPMiddleware from starlette.middleware.sessions import SessionMiddleware @@ -54,6 +56,7 @@ from apps.webui.main import ( get_pipe_models, generate_function_chat_completion, ) +from apps.webui.internal.db import Session, SessionLocal from pydantic import BaseModel @@ -125,6 +128,8 @@ from config import ( WEBUI_SESSION_COOKIE_SAME_SITE, WEBUI_SESSION_COOKIE_SECURE, AppConfig, + BACKEND_DIR, + DATABASE_URL, ) from constants import ERROR_MESSAGES, 
WEBHOOK_MESSAGES from utils.webhook import post_webhook @@ -167,8 +172,19 @@ https://github.com/open-webui/open-webui ) +def run_migrations(): + env = os.environ.copy() + env["DATABASE_URL"] = DATABASE_URL + migration_task = subprocess.run( + ["alembic", f"-c{BACKEND_DIR}/alembic.ini", "upgrade", "head"], env=env + ) + if migration_task.returncode > 0: + raise ValueError("Error running migrations") + + @asynccontextmanager async def lifespan(app: FastAPI): + run_migrations() yield @@ -902,6 +918,14 @@ app.add_middleware( ) +@app.middleware("http") +async def commit_session_after_request(request: Request, call_next): + response = await call_next(request) + log.debug("Commit session after request") + Session.commit() + return response + + @app.middleware("http") async def check_url(request: Request, call_next): if len(app.state.MODELS) == 0: @@ -1743,7 +1767,9 @@ async def get_pipelines(urlIdx: Optional[int] = None, user=Depends(get_admin_use @app.get("/api/pipelines/{pipeline_id}/valves") async def get_pipeline_valves( - urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user) + urlIdx: Optional[int], + pipeline_id: str, + user=Depends(get_admin_user), ): models = await get_all_models() r = None @@ -1781,7 +1807,9 @@ async def get_pipeline_valves( @app.get("/api/pipelines/{pipeline_id}/valves/spec") async def get_pipeline_valves_spec( - urlIdx: Optional[int], pipeline_id: str, user=Depends(get_admin_user) + urlIdx: Optional[int], + pipeline_id: str, + user=Depends(get_admin_user), ): models = await get_all_models() @@ -2168,6 +2196,12 @@ async def healthcheck(): return {"status": True} +@app.get("/health/db") +async def healthcheck_with_db(): + Session.execute(text("SELECT 1;")).all() + return {"status": True} + + app.mount("/static", StaticFiles(directory=STATIC_DIR), name="static") app.mount("/cache", StaticFiles(directory=CACHE_DIR), name="cache") diff --git a/backend/migrations/README b/backend/migrations/README new file mode 100644 index 0000000000..f1d93dff9d --- /dev/null +++ b/backend/migrations/README @@ -0,0 +1,4 @@ +Generic single-database configuration. + +Create new migrations with +DATABASE_URL= alembic revision --autogenerate -m "a description" diff --git a/backend/migrations/env.py b/backend/migrations/env.py new file mode 100644 index 0000000000..836893bbe6 --- /dev/null +++ b/backend/migrations/env.py @@ -0,0 +1,93 @@ +import os +from logging.config import fileConfig + +from sqlalchemy import engine_from_config +from sqlalchemy import pool + +from alembic import context + +from apps.webui.models.auths import Auth +from apps.webui.models.chats import Chat +from apps.webui.models.documents import Document +from apps.webui.models.memories import Memory +from apps.webui.models.models import Model +from apps.webui.models.prompts import Prompt +from apps.webui.models.tags import Tag, ChatIdTag +from apps.webui.models.tools import Tool +from apps.webui.models.users import User +from apps.webui.models.files import File +from apps.webui.models.functions import Function + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
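For orientation: run_migrations() above exports DATABASE_URL and shells out to the alembic CLI, and a few lines further down this env.py copies that variable into sqlalchemy.url. An in-process equivalent using Alembic's command API — a sketch only, with an illustrative helper name that is not part of this diff — could look like:

import os

from alembic import command
from alembic.config import Config


def upgrade_to_head(database_url: str, alembic_ini: str = "alembic.ini") -> None:
    # Run from the backend/ directory so `script_location = migrations` resolves.
    # env.py copies DATABASE_URL into sqlalchemy.url, so exporting it here has the
    # same effect as the environment passed to the subprocess in run_migrations().
    os.environ["DATABASE_URL"] = database_url
    command.upgrade(Config(alembic_ini), "head")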
+if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = Auth.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +database_url = os.getenv("DATABASE_URL", None) +if database_url: + config.set_main_option("sqlalchemy.url", database_url) + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + connectable = engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + context.configure(connection=connection, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/migrations/script.py.mako b/backend/migrations/script.py.mako new file mode 100644 index 0000000000..5f667ccfe0 --- /dev/null +++ b/backend/migrations/script.py.mako @@ -0,0 +1,27 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import apps.webui.internal.db +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
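A note on the files that follow: migrations/util.py exposes get_existing_tables(), and the init revision below wraps each create_table call in an "only if missing" check, so databases that already contain these tables are left untouched. The same guard, sketched standalone with SQLAlchemy's runtime inspector (the URL, table name, and SQL are illustrative):

import sqlalchemy as sa

engine = sa.create_engine("sqlite:///webui.db")  # illustrative URL
existing_tables = set(sa.inspect(engine).get_table_names())

if "auth" not in existing_tables:
    # Outside of Alembic there is no `op`, so the table is created directly.
    with engine.begin() as conn:
        conn.execute(sa.text("CREATE TABLE auth (id TEXT PRIMARY KEY)"))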
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/migrations/util.py b/backend/migrations/util.py new file mode 100644 index 0000000000..401bb94d03 --- /dev/null +++ b/backend/migrations/util.py @@ -0,0 +1,9 @@ +from alembic import op +from sqlalchemy import Inspector + + +def get_existing_tables(): + con = op.get_bind() + inspector = Inspector.from_engine(con) + tables = set(inspector.get_table_names()) + return tables diff --git a/backend/migrations/versions/7e5b5dc7342b_init.py b/backend/migrations/versions/7e5b5dc7342b_init.py new file mode 100644 index 0000000000..b82627f5bc --- /dev/null +++ b/backend/migrations/versions/7e5b5dc7342b_init.py @@ -0,0 +1,202 @@ +"""init + +Revision ID: 7e5b5dc7342b +Revises: +Create Date: 2024-06-24 13:15:33.808998 + +""" + +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import apps.webui.internal.db +from migrations.util import get_existing_tables + +# revision identifiers, used by Alembic. +revision: str = "7e5b5dc7342b" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + existing_tables = set(get_existing_tables()) + + # ### commands auto generated by Alembic - please adjust! ### + if "auth" not in existing_tables: + op.create_table( + "auth", + sa.Column("id", sa.String(), nullable=False), + sa.Column("email", sa.String(), nullable=True), + sa.Column("password", sa.Text(), nullable=True), + sa.Column("active", sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "chat" not in existing_tables: + op.create_table( + "chat", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("title", sa.Text(), nullable=True), + sa.Column("chat", sa.Text(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("share_id", sa.Text(), nullable=True), + sa.Column("archived", sa.Boolean(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("share_id"), + ) + + if "chatidtag" not in existing_tables: + op.create_table( + "chatidtag", + sa.Column("id", sa.String(), nullable=False), + sa.Column("tag_name", sa.String(), nullable=True), + sa.Column("chat_id", sa.String(), nullable=True), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("timestamp", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "document" not in existing_tables: + op.create_table( + "document", + sa.Column("collection_name", sa.String(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("title", sa.Text(), nullable=True), + sa.Column("filename", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("timestamp", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("collection_name"), + sa.UniqueConstraint("name"), + ) + + if "file" not in existing_tables: + op.create_table( + "file", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", 
sa.String(), nullable=True), + sa.Column("filename", sa.Text(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "function" not in existing_tables: + op.create_table( + "function", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("type", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("valves", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("is_active", sa.Boolean(), nullable=True), + sa.Column("is_global", sa.Boolean(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "memory" not in existing_tables: + op.create_table( + "memory", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "model" not in existing_tables: + op.create_table( + "model", + sa.Column("id", sa.Text(), nullable=False), + sa.Column("user_id", sa.Text(), nullable=True), + sa.Column("base_model_id", sa.Text(), nullable=True), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("params", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "prompt" not in existing_tables: + op.create_table( + "prompt", + sa.Column("command", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("title", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("timestamp", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("command"), + ) + + if "tag" not in existing_tables: + op.create_table( + "tag", + sa.Column("id", sa.String(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("data", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "tool" not in existing_tables: + op.create_table( + "tool", + sa.Column("id", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=True), + sa.Column("name", sa.Text(), nullable=True), + sa.Column("content", sa.Text(), nullable=True), + sa.Column("specs", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("meta", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("valves", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.PrimaryKeyConstraint("id"), + ) + + if "user" not in existing_tables: + op.create_table( + "user", + sa.Column("id", sa.String(), nullable=False), + sa.Column("name", sa.String(), nullable=True), + sa.Column("email", sa.String(), nullable=True), + sa.Column("role", sa.String(), nullable=True), + sa.Column("profile_image_url", sa.Text(), 
nullable=True), + sa.Column("last_active_at", sa.BigInteger(), nullable=True), + sa.Column("updated_at", sa.BigInteger(), nullable=True), + sa.Column("created_at", sa.BigInteger(), nullable=True), + sa.Column("api_key", sa.String(), nullable=True), + sa.Column("settings", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("info", apps.webui.internal.db.JSONField(), nullable=True), + sa.Column("oauth_sub", sa.Text(), nullable=True), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("api_key"), + sa.UniqueConstraint("oauth_sub"), + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("user") + op.drop_table("tool") + op.drop_table("tag") + op.drop_table("prompt") + op.drop_table("model") + op.drop_table("memory") + op.drop_table("function") + op.drop_table("file") + op.drop_table("document") + op.drop_table("chatidtag") + op.drop_table("chat") + op.drop_table("auth") + # ### end Alembic commands ### diff --git a/backend/requirements.txt b/backend/requirements.txt index 750298456e..7c6d629036 100644 --- a/backend/requirements.txt +++ b/backend/requirements.txt @@ -12,8 +12,10 @@ passlib[bcrypt]==1.7.4 requests==2.32.3 aiohttp==3.9.5 -peewee==3.17.5 -peewee-migrate==1.12.2 +sqlalchemy==2.0.30 +alembic==1.13.1 +# peewee==3.17.5 +# peewee-migrate==1.12.2 psycopg2-binary==2.9.9 PyMySQL==1.1.1 bcrypt==4.1.3 @@ -67,4 +69,9 @@ pytube==15.0.0 extract_msg pydub -duckduckgo-search~=6.1.7 \ No newline at end of file +duckduckgo-search~=6.1.7 + +## Tests +docker~=7.1.0 +pytest~=8.2.1 +pytest-docker~=3.1.1 diff --git a/backend/test/__init__.py b/backend/test/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/backend/test/apps/webui/routers/test_auths.py b/backend/test/apps/webui/routers/test_auths.py new file mode 100644 index 0000000000..3a8695a693 --- /dev/null +++ b/backend/test/apps/webui/routers/test_auths.py @@ -0,0 +1,202 @@ +import pytest + +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestAuths(AbstractPostgresTest): + BASE_PATH = "/api/v1/auths" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.users import Users + from apps.webui.models.auths import Auths + + cls.users = Users + cls.auths = Auths + + def test_get_session_user(self): + with mock_webui_user(): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert response.json() == { + "id": "1", + "name": "John Doe", + "email": "john.doe@openwebui.com", + "role": "user", + "profile_image_url": "/user.png", + } + + def test_update_profile(self): + from utils.utils import get_password_hash + + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password=get_password_hash("old_password"), + name="John Doe", + profile_image_url="/user.png", + role="user", + ) + + with mock_webui_user(id=user.id): + response = self.fast_api_client.post( + self.create_url("/update/profile"), + json={"name": "John Doe 2", "profile_image_url": "/user2.png"}, + ) + assert response.status_code == 200 + db_user = self.users.get_user_by_id(user.id) + assert db_user.name == "John Doe 2" + assert db_user.profile_image_url == "/user2.png" + + def test_update_password(self): + from utils.utils import get_password_hash + + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password=get_password_hash("old_password"), + name="John Doe", + 
profile_image_url="/user.png", + role="user", + ) + + with mock_webui_user(id=user.id): + response = self.fast_api_client.post( + self.create_url("/update/password"), + json={"password": "old_password", "new_password": "new_password"}, + ) + assert response.status_code == 200 + + old_auth = self.auths.authenticate_user( + "john.doe@openwebui.com", "old_password" + ) + assert old_auth is None + new_auth = self.auths.authenticate_user( + "john.doe@openwebui.com", "new_password" + ) + assert new_auth is not None + + def test_signin(self): + from utils.utils import get_password_hash + + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password=get_password_hash("password"), + name="John Doe", + profile_image_url="/user.png", + role="user", + ) + response = self.fast_api_client.post( + self.create_url("/signin"), + json={"email": "john.doe@openwebui.com", "password": "password"}, + ) + assert response.status_code == 200 + data = response.json() + assert data["id"] == user.id + assert data["name"] == "John Doe" + assert data["email"] == "john.doe@openwebui.com" + assert data["role"] == "user" + assert data["profile_image_url"] == "/user.png" + assert data["token"] is not None and len(data["token"]) > 0 + assert data["token_type"] == "Bearer" + + def test_signup(self): + response = self.fast_api_client.post( + self.create_url("/signup"), + json={ + "name": "John Doe", + "email": "john.doe@openwebui.com", + "password": "password", + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["id"] is not None and len(data["id"]) > 0 + assert data["name"] == "John Doe" + assert data["email"] == "john.doe@openwebui.com" + assert data["role"] in ["admin", "user", "pending"] + assert data["profile_image_url"] == "/user.png" + assert data["token"] is not None and len(data["token"]) > 0 + assert data["token_type"] == "Bearer" + + def test_add_user(self): + with mock_webui_user(): + response = self.fast_api_client.post( + self.create_url("/add"), + json={ + "name": "John Doe 2", + "email": "john.doe2@openwebui.com", + "password": "password2", + "role": "admin", + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["id"] is not None and len(data["id"]) > 0 + assert data["name"] == "John Doe 2" + assert data["email"] == "john.doe2@openwebui.com" + assert data["role"] == "admin" + assert data["profile_image_url"] == "/user.png" + assert data["token"] is not None and len(data["token"]) > 0 + assert data["token_type"] == "Bearer" + + def test_get_admin_details(self): + self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + profile_image_url="/user.png", + role="admin", + ) + with mock_webui_user(): + response = self.fast_api_client.get(self.create_url("/admin/details")) + + assert response.status_code == 200 + assert response.json() == { + "name": "John Doe", + "email": "john.doe@openwebui.com", + } + + def test_create_api_key_(self): + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + profile_image_url="/user.png", + role="admin", + ) + with mock_webui_user(id=user.id): + response = self.fast_api_client.post(self.create_url("/api_key")) + assert response.status_code == 200 + data = response.json() + assert data["api_key"] is not None + assert len(data["api_key"]) > 0 + + def test_delete_api_key(self): + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + 
profile_image_url="/user.png", + role="admin", + ) + self.users.update_user_api_key_by_id(user.id, "abc") + with mock_webui_user(id=user.id): + response = self.fast_api_client.delete(self.create_url("/api_key")) + assert response.status_code == 200 + assert response.json() == True + db_user = self.users.get_user_by_id(user.id) + assert db_user.api_key is None + + def test_get_api_key(self): + user = self.auths.insert_new_auth( + email="john.doe@openwebui.com", + password="password", + name="John Doe", + profile_image_url="/user.png", + role="admin", + ) + self.users.update_user_api_key_by_id(user.id, "abc") + with mock_webui_user(id=user.id): + response = self.fast_api_client.get(self.create_url("/api_key")) + assert response.status_code == 200 + assert response.json() == {"api_key": "abc"} diff --git a/backend/test/apps/webui/routers/test_chats.py b/backend/test/apps/webui/routers/test_chats.py new file mode 100644 index 0000000000..f4661b6257 --- /dev/null +++ b/backend/test/apps/webui/routers/test_chats.py @@ -0,0 +1,238 @@ +import uuid + +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestChats(AbstractPostgresTest): + + BASE_PATH = "/api/v1/chats" + + def setup_class(cls): + super().setup_class() + + def setup_method(self): + super().setup_method() + from apps.webui.models.chats import ChatForm + from apps.webui.models.chats import Chats + + self.chats = Chats + self.chats.insert_new_chat( + "2", + ChatForm( + **{ + "chat": { + "name": "chat1", + "description": "chat1 description", + "tags": ["tag1", "tag2"], + "history": {"currentId": "1", "messages": []}, + } + } + ), + ) + + def test_get_session_user_chat_list(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + first_chat = response.json()[0] + assert first_chat["id"] is not None + assert first_chat["title"] == "New Chat" + assert first_chat["created_at"] is not None + assert first_chat["updated_at"] is not None + + def test_delete_all_user_chats(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.delete(self.create_url("/")) + assert response.status_code == 200 + assert len(self.chats.get_chats()) == 0 + + def test_get_user_chat_list_by_user_id(self): + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("/list/user/2")) + assert response.status_code == 200 + first_chat = response.json()[0] + assert first_chat["id"] is not None + assert first_chat["title"] == "New Chat" + assert first_chat["created_at"] is not None + assert first_chat["updated_at"] is not None + + def test_create_new_chat(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/new"), + json={ + "chat": { + "name": "chat2", + "description": "chat2 description", + "tags": ["tag1", "tag2"], + } + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["archived"] is False + assert data["chat"] == { + "name": "chat2", + "description": "chat2 description", + "tags": ["tag1", "tag2"], + } + assert data["user_id"] == "2" + assert data["id"] is not None + assert data["share_id"] is None + assert data["title"] == "New Chat" + assert data["updated_at"] is not None + assert data["created_at"] is not None + assert len(self.chats.get_chats()) == 2 + + def test_get_user_chats(self): + self.test_get_session_user_chat_list() + + def test_get_user_archived_chats(self): + 
self.chats.archive_all_chats_by_user_id("2") + from apps.webui.internal.db import Session + + Session.commit() + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/all/archived")) + assert response.status_code == 200 + first_chat = response.json()[0] + assert first_chat["id"] is not None + assert first_chat["title"] == "New Chat" + assert first_chat["created_at"] is not None + assert first_chat["updated_at"] is not None + + def test_get_all_user_chats_in_db(self): + with mock_webui_user(id="4"): + response = self.fast_api_client.get(self.create_url("/all/db")) + assert response.status_code == 200 + assert len(response.json()) == 1 + + def test_get_archived_session_user_chat_list(self): + self.test_get_user_archived_chats() + + def test_archive_all_chats(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.post(self.create_url("/archive/all")) + assert response.status_code == 200 + assert len(self.chats.get_archived_chats_by_user_id("2")) == 1 + + def test_get_shared_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + self.chats.update_chat_share_id_by_id(chat_id, chat_id) + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/share/{chat_id}")) + assert response.status_code == 200 + data = response.json() + assert data["id"] == chat_id + assert data["chat"] == { + "name": "chat1", + "description": "chat1 description", + "tags": ["tag1", "tag2"], + "history": {"currentId": "1", "messages": []}, + } + assert data["id"] == chat_id + assert data["share_id"] == chat_id + assert data["title"] == "New Chat" + + def test_get_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/{chat_id}")) + assert response.status_code == 200 + data = response.json() + assert data["id"] == chat_id + assert data["chat"] == { + "name": "chat1", + "description": "chat1 description", + "tags": ["tag1", "tag2"], + "history": {"currentId": "1", "messages": []}, + } + assert data["share_id"] is None + assert data["title"] == "New Chat" + assert data["user_id"] == "2" + + def test_update_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url(f"/{chat_id}"), + json={ + "chat": { + "name": "chat2", + "description": "chat2 description", + "tags": ["tag2", "tag4"], + "title": "Just another title", + } + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["id"] == chat_id + assert data["chat"] == { + "name": "chat2", + "title": "Just another title", + "description": "chat2 description", + "tags": ["tag2", "tag4"], + "history": {"currentId": "1", "messages": []}, + } + assert data["share_id"] is None + assert data["title"] == "Just another title" + assert data["user_id"] == "2" + + def test_delete_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.delete(self.create_url(f"/{chat_id}")) + assert response.status_code == 200 + assert response.json() is True + + def test_clone_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/{chat_id}/clone")) + + assert response.status_code == 200 + data = response.json() + assert data["id"] != chat_id + assert data["chat"] == { + "branchPointMessageId": "1", + "description": "chat1 description", + "history": 
{"currentId": "1", "messages": []}, + "name": "chat1", + "originalChatId": chat_id, + "tags": ["tag1", "tag2"], + "title": "Clone of New Chat", + } + assert data["share_id"] is None + assert data["title"] == "Clone of New Chat" + assert data["user_id"] == "2" + + def test_archive_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url(f"/{chat_id}/archive")) + assert response.status_code == 200 + + chat = self.chats.get_chat_by_id(chat_id) + assert chat.archived is True + + def test_share_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + with mock_webui_user(id="2"): + response = self.fast_api_client.post(self.create_url(f"/{chat_id}/share")) + assert response.status_code == 200 + + chat = self.chats.get_chat_by_id(chat_id) + assert chat.share_id is not None + + def test_delete_shared_chat_by_id(self): + chat_id = self.chats.get_chats()[0].id + share_id = str(uuid.uuid4()) + self.chats.update_chat_share_id_by_id(chat_id, share_id) + with mock_webui_user(id="2"): + response = self.fast_api_client.delete(self.create_url(f"/{chat_id}/share")) + assert response.status_code + + chat = self.chats.get_chat_by_id(chat_id) + assert chat.share_id is None diff --git a/backend/test/apps/webui/routers/test_documents.py b/backend/test/apps/webui/routers/test_documents.py new file mode 100644 index 0000000000..14ca339fd0 --- /dev/null +++ b/backend/test/apps/webui/routers/test_documents.py @@ -0,0 +1,106 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestDocuments(AbstractPostgresTest): + + BASE_PATH = "/api/v1/documents" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.documents import Documents + + cls.documents = Documents + + def test_documents(self): + # Empty database + assert len(self.documents.get_docs()) == 0 + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 0 + + # Create a new document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "name": "doc_name", + "title": "doc title", + "collection_name": "custom collection", + "filename": "doc_name.pdf", + "content": "", + }, + ) + assert response.status_code == 200 + assert response.json()["name"] == "doc_name" + assert len(self.documents.get_docs()) == 1 + + # Get the document + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/doc?name=doc_name")) + assert response.status_code == 200 + data = response.json() + assert data["collection_name"] == "custom collection" + assert data["name"] == "doc_name" + assert data["title"] == "doc title" + assert data["filename"] == "doc_name.pdf" + assert data["content"] == {} + + # Create another document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "name": "doc_name 2", + "title": "doc title 2", + "collection_name": "custom collection 2", + "filename": "doc_name2.pdf", + "content": "", + }, + ) + assert response.status_code == 200 + assert response.json()["name"] == "doc_name 2" + assert len(self.documents.get_docs()) == 2 + + # Get all documents + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 2 + + # Update the first 
document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/doc/update?name=doc_name"), + json={"name": "doc_name rework", "title": "updated title"}, + ) + assert response.status_code == 200 + data = response.json() + assert data["name"] == "doc_name rework" + assert data["title"] == "updated title" + + # Tag the first document + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/doc/tags"), + json={ + "name": "doc_name rework", + "tags": [{"name": "testing-tag"}, {"name": "another-tag"}], + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["name"] == "doc_name rework" + assert data["content"] == { + "tags": [{"name": "testing-tag"}, {"name": "another-tag"}] + } + assert len(self.documents.get_docs()) == 2 + + # Delete the first document + with mock_webui_user(id="2"): + response = self.fast_api_client.delete( + self.create_url("/doc/delete?name=doc_name rework") + ) + assert response.status_code == 200 + assert len(self.documents.get_docs()) == 1 diff --git a/backend/test/apps/webui/routers/test_models.py b/backend/test/apps/webui/routers/test_models.py new file mode 100644 index 0000000000..34d3e30bd1 --- /dev/null +++ b/backend/test/apps/webui/routers/test_models.py @@ -0,0 +1,62 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestModels(AbstractPostgresTest): + + BASE_PATH = "/api/v1/models" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.models import Model + + cls.models = Model + + def test_models(self): + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 0 + + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/add"), + json={ + "id": "my-model", + "base_model_id": "base-model-id", + "name": "Hello World", + "meta": { + "profile_image_url": "/favicon.png", + "description": "description", + "capabilities": None, + "model_config": {}, + }, + "params": {}, + }, + ) + assert response.status_code == 200 + + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 1 + + with mock_webui_user(id="2"): + response = self.fast_api_client.get( + self.create_url(query_params={"id": "my-model"}) + ) + assert response.status_code == 200 + data = response.json()[0] + assert data["id"] == "my-model" + assert data["name"] == "Hello World" + + with mock_webui_user(id="2"): + response = self.fast_api_client.delete( + self.create_url("/delete?id=my-model") + ) + assert response.status_code == 200 + + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 0 diff --git a/backend/test/apps/webui/routers/test_prompts.py b/backend/test/apps/webui/routers/test_prompts.py new file mode 100644 index 0000000000..9f47be9923 --- /dev/null +++ b/backend/test/apps/webui/routers/test_prompts.py @@ -0,0 +1,92 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +class TestPrompts(AbstractPostgresTest): + + BASE_PATH = "/api/v1/prompts" + + def test_prompts(self): + # Get all prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + 
assert response.status_code == 200 + assert len(response.json()) == 0 + + # Create a two new prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "command": "/my-command", + "title": "Hello World", + "content": "description", + }, + ) + assert response.status_code == 200 + with mock_webui_user(id="3"): + response = self.fast_api_client.post( + self.create_url("/create"), + json={ + "command": "/my-command2", + "title": "Hello World 2", + "content": "description 2", + }, + ) + assert response.status_code == 200 + + # Get all prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 2 + + # Get prompt by command + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/command/my-command")) + assert response.status_code == 200 + data = response.json() + assert data["command"] == "/my-command" + assert data["title"] == "Hello World" + assert data["content"] == "description" + assert data["user_id"] == "2" + + # Update prompt + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/command/my-command2/update"), + json={ + "command": "irrelevant for request", + "title": "Hello World Updated", + "content": "description Updated", + }, + ) + assert response.status_code == 200 + data = response.json() + assert data["command"] == "/my-command2" + assert data["title"] == "Hello World Updated" + assert data["content"] == "description Updated" + assert data["user_id"] == "3" + + # Get prompt by command + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/command/my-command2")) + assert response.status_code == 200 + data = response.json() + assert data["command"] == "/my-command2" + assert data["title"] == "Hello World Updated" + assert data["content"] == "description Updated" + assert data["user_id"] == "3" + + # Delete prompt + with mock_webui_user(id="2"): + response = self.fast_api_client.delete( + self.create_url("/command/my-command/delete") + ) + assert response.status_code == 200 + + # Get all prompts + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/")) + assert response.status_code == 200 + assert len(response.json()) == 1 diff --git a/backend/test/apps/webui/routers/test_users.py b/backend/test/apps/webui/routers/test_users.py new file mode 100644 index 0000000000..9736b4d32a --- /dev/null +++ b/backend/test/apps/webui/routers/test_users.py @@ -0,0 +1,168 @@ +from test.util.abstract_integration_test import AbstractPostgresTest +from test.util.mock_user import mock_webui_user + + +def _get_user_by_id(data, param): + return next((item for item in data if item["id"] == param), None) + + +def _assert_user(data, id, **kwargs): + user = _get_user_by_id(data, id) + assert user is not None + comparison_data = { + "name": f"user {id}", + "email": f"user{id}@openwebui.com", + "profile_image_url": f"/user{id}.png", + "role": "user", + **kwargs, + } + for key, value in comparison_data.items(): + assert user[key] == value + + +class TestUsers(AbstractPostgresTest): + + BASE_PATH = "/api/v1/users" + + def setup_class(cls): + super().setup_class() + from apps.webui.models.users import Users + + cls.users = Users + + def setup_method(self): + super().setup_method() + self.users.insert_new_user( + id="1", + name="user 1", + email="user1@openwebui.com", + profile_image_url="/user1.png", + 
role="user", + ) + self.users.insert_new_user( + id="2", + name="user 2", + email="user2@openwebui.com", + profile_image_url="/user2.png", + role="user", + ) + + def test_users(self): + # Get all users + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 2 + data = response.json() + _assert_user(data, "1") + _assert_user(data, "2") + + # update role + with mock_webui_user(id="3"): + response = self.fast_api_client.post( + self.create_url("/update/role"), json={"id": "2", "role": "admin"} + ) + assert response.status_code == 200 + _assert_user([response.json()], "2", role="admin") + + # Get all users + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 2 + data = response.json() + _assert_user(data, "1") + _assert_user(data, "2", role="admin") + + # Get (empty) user settings + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/user/settings")) + assert response.status_code == 200 + assert response.json() is None + + # Update user settings + with mock_webui_user(id="2"): + response = self.fast_api_client.post( + self.create_url("/user/settings/update"), + json={ + "ui": {"attr1": "value1", "attr2": "value2"}, + "model_config": {"attr3": "value3", "attr4": "value4"}, + }, + ) + assert response.status_code == 200 + + # Get user settings + with mock_webui_user(id="2"): + response = self.fast_api_client.get(self.create_url("/user/settings")) + assert response.status_code == 200 + assert response.json() == { + "ui": {"attr1": "value1", "attr2": "value2"}, + "model_config": {"attr3": "value3", "attr4": "value4"}, + } + + # Get (empty) user info + with mock_webui_user(id="1"): + response = self.fast_api_client.get(self.create_url("/user/info")) + assert response.status_code == 200 + assert response.json() is None + + # Update user info + with mock_webui_user(id="1"): + response = self.fast_api_client.post( + self.create_url("/user/info/update"), + json={"attr1": "value1", "attr2": "value2"}, + ) + assert response.status_code == 200 + + # Get user info + with mock_webui_user(id="1"): + response = self.fast_api_client.get(self.create_url("/user/info")) + assert response.status_code == 200 + assert response.json() == {"attr1": "value1", "attr2": "value2"} + + # Get user by id + with mock_webui_user(id="1"): + response = self.fast_api_client.get(self.create_url("/2")) + assert response.status_code == 200 + assert response.json() == {"name": "user 2", "profile_image_url": "/user2.png"} + + # Update user by id + with mock_webui_user(id="1"): + response = self.fast_api_client.post( + self.create_url("/2/update"), + json={ + "name": "user 2 updated", + "email": "user2-updated@openwebui.com", + "profile_image_url": "/user2-updated.png", + }, + ) + assert response.status_code == 200 + + # Get all users + with mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 2 + data = response.json() + _assert_user(data, "1") + _assert_user( + data, + "2", + role="admin", + name="user 2 updated", + email="user2-updated@openwebui.com", + profile_image_url="/user2-updated.png", + ) + + # Delete user by id + with mock_webui_user(id="1"): + response = self.fast_api_client.delete(self.create_url("/2")) + assert response.status_code == 200 + + # Get all users + with 
mock_webui_user(id="3"): + response = self.fast_api_client.get(self.create_url("")) + assert response.status_code == 200 + assert len(response.json()) == 1 + data = response.json() + _assert_user(data, "1") diff --git a/backend/test/util/abstract_integration_test.py b/backend/test/util/abstract_integration_test.py new file mode 100644 index 0000000000..8535221a85 --- /dev/null +++ b/backend/test/util/abstract_integration_test.py @@ -0,0 +1,161 @@ +import logging +import os +import time + +import docker +import pytest +from docker import DockerClient +from pytest_docker.plugin import get_docker_ip +from fastapi.testclient import TestClient +from sqlalchemy import text, create_engine + + +log = logging.getLogger(__name__) + + +def get_fast_api_client(): + from main import app + + with TestClient(app) as c: + return c + + +class AbstractIntegrationTest: + BASE_PATH = None + + def create_url(self, path="", query_params=None): + if self.BASE_PATH is None: + raise Exception("BASE_PATH is not set") + parts = self.BASE_PATH.split("/") + parts = [part.strip() for part in parts if part.strip() != ""] + path_parts = path.split("/") + path_parts = [part.strip() for part in path_parts if part.strip() != ""] + query_parts = "" + if query_params: + query_parts = "&".join( + [f"{key}={value}" for key, value in query_params.items()] + ) + query_parts = f"?{query_parts}" + return "/".join(parts + path_parts) + query_parts + + @classmethod + def setup_class(cls): + pass + + def setup_method(self): + pass + + @classmethod + def teardown_class(cls): + pass + + def teardown_method(self): + pass + + +class AbstractPostgresTest(AbstractIntegrationTest): + DOCKER_CONTAINER_NAME = "postgres-test-container-will-get-deleted" + docker_client: DockerClient + + @classmethod + def _create_db_url(cls, env_vars_postgres: dict) -> str: + host = get_docker_ip() + user = env_vars_postgres["POSTGRES_USER"] + pw = env_vars_postgres["POSTGRES_PASSWORD"] + port = 8081 + db = env_vars_postgres["POSTGRES_DB"] + return f"postgresql://{user}:{pw}@{host}:{port}/{db}" + + @classmethod + def setup_class(cls): + super().setup_class() + try: + env_vars_postgres = { + "POSTGRES_USER": "user", + "POSTGRES_PASSWORD": "example", + "POSTGRES_DB": "openwebui", + } + cls.docker_client = docker.from_env() + cls.docker_client.containers.run( + "postgres:16.2", + detach=True, + environment=env_vars_postgres, + name=cls.DOCKER_CONTAINER_NAME, + ports={5432: ("0.0.0.0", 8081)}, + command="postgres -c log_statement=all", + ) + time.sleep(0.5) + + database_url = cls._create_db_url(env_vars_postgres) + os.environ["DATABASE_URL"] = database_url + retries = 10 + db = None + while retries > 0: + try: + from config import BACKEND_DIR + + db = create_engine(database_url, pool_pre_ping=True) + db = db.connect() + log.info("postgres is ready!") + break + except Exception as e: + log.warning(e) + time.sleep(3) + retries -= 1 + + if db: + # import must be after setting env! 
+ cls.fast_api_client = get_fast_api_client() + db.close() + else: + raise Exception("Could not connect to Postgres") + except Exception as ex: + log.error(ex) + cls.teardown_class() + pytest.fail(f"Could not setup test environment: {ex}") + + def _check_db_connection(self): + from apps.webui.internal.db import Session + + retries = 10 + while retries > 0: + try: + Session.execute(text("SELECT 1")) + Session.commit() + break + except Exception as e: + Session.rollback() + log.warning(e) + time.sleep(3) + retries -= 1 + + def setup_method(self): + super().setup_method() + self._check_db_connection() + + @classmethod + def teardown_class(cls) -> None: + super().teardown_class() + cls.docker_client.containers.get(cls.DOCKER_CONTAINER_NAME).remove(force=True) + + def teardown_method(self): + from apps.webui.internal.db import Session + + # rollback everything not yet committed + Session.commit() + + # truncate all tables + tables = [ + "auth", + "chat", + "chatidtag", + "document", + "memory", + "model", + "prompt", + "tag", + '"user"', + ] + for table in tables: + Session.execute(text(f"TRUNCATE TABLE {table}")) + Session.commit() diff --git a/backend/test/util/mock_user.py b/backend/test/util/mock_user.py new file mode 100644 index 0000000000..8d0300d3f9 --- /dev/null +++ b/backend/test/util/mock_user.py @@ -0,0 +1,45 @@ +from contextlib import contextmanager + +from fastapi import FastAPI + + +@contextmanager +def mock_webui_user(**kwargs): + from apps.webui.main import app + + with mock_user(app, **kwargs): + yield + + +@contextmanager +def mock_user(app: FastAPI, **kwargs): + from utils.utils import ( + get_current_user, + get_verified_user, + get_admin_user, + get_current_user_by_api_key, + ) + from apps.webui.models.users import User + + def create_user(): + user_parameters = { + "id": "1", + "name": "John Doe", + "email": "john.doe@openwebui.com", + "role": "user", + "profile_image_url": "/user.png", + "last_active_at": 1627351200, + "updated_at": 1627351200, + "created_at": 162735120, + **kwargs, + } + return User(**user_parameters) + + app.dependency_overrides = { + get_current_user: create_user, + get_verified_user: create_user, + get_admin_user: create_user, + get_current_user_by_api_key: create_user, + } + yield + app.dependency_overrides = {} diff --git a/backend/utils/utils.py b/backend/utils/utils.py index 8c3c899bd5..fbc539af5c 100644 --- a/backend/utils/utils.py +++ b/backend/utils/utils.py @@ -1,5 +1,6 @@ from fastapi.security import HTTPBearer, HTTPAuthorizationCredentials from fastapi import HTTPException, status, Depends, Request +from sqlalchemy.orm import Session from apps.webui.models.users import Users