From a7993f6f4e4591cd2aaa4718ece9e5623557d019 Mon Sep 17 00:00:00 2001
From: Timothy Jaeryang Baek
Date: Wed, 10 Dec 2025 12:22:40 -0500
Subject: [PATCH] refac

---
 backend/open_webui/models/chats.py  |  66 ++++++++++++++++-
 backend/open_webui/routers/chats.py | 109 +++++++++++++++++++++++-----
 2 files changed, 152 insertions(+), 23 deletions(-)

diff --git a/backend/open_webui/models/chats.py b/backend/open_webui/models/chats.py
index 187a4522c9..381b625200 100644
--- a/backend/open_webui/models/chats.py
+++ b/backend/open_webui/models/chats.py
@@ -126,6 +126,49 @@ class ChatTitleIdResponse(BaseModel):
     created_at: int
 
 
+class ChatListResponse(BaseModel):
+    items: list[ChatModel]
+    total: int
+
+
+class ChatUsageStatsResponse(BaseModel):
+    id: str  # chat id
+
+    models: dict = {}  # models used in the chat with their usage counts
+    message_count: int  # number of messages in the chat
+
+    history_models: dict = {}  # models used in the chat history with their usage counts
+    history_message_count: int  # number of messages in the chat history
+    history_user_message_count: int  # number of user messages in the chat history
+    history_assistant_message_count: (
+        int  # number of assistant messages in the chat history
+    )
+
+    average_response_time: (
+        float  # average response time of assistant messages in seconds
+    )
+    average_user_message_content_length: (
+        float  # average length of user message contents
+    )
+    average_assistant_message_content_length: (
+        float  # average length of assistant message contents
+    )
+
+    tags: list[str] = []  # tags associated with the chat
+
+    last_message_at: int  # timestamp of the last message
+    updated_at: int
+    created_at: int
+
+    model_config = ConfigDict(extra="allow")
+
+
+class ChatUsageStatsListResponse(BaseModel):
+    items: list[ChatUsageStatsResponse]
+    total: int
+    model_config = ConfigDict(extra="allow")
+
+
 class ChatTable:
     def _clean_null_bytes(self, obj):
         """
@@ -675,14 +718,31 @@ class ChatTable:
             )
             return [ChatModel.model_validate(chat) for chat in all_chats]
 
-    def get_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
+    def get_chats_by_user_id(
+        self, user_id: str, skip: Optional[int] = None, limit: Optional[int] = None
+    ) -> ChatListResponse:
         with get_db() as db:
-            all_chats = (
+            query = (
                 db.query(Chat)
                 .filter_by(user_id=user_id)
                 .order_by(Chat.updated_at.desc())
             )
-            return [ChatModel.model_validate(chat) for chat in all_chats]
+
+            total = query.count()
+
+            if skip is not None:
+                query = query.offset(skip)
+            if limit is not None:
+                query = query.limit(limit)
+
+            all_chats = query.all()
+
+            return ChatListResponse(
+                **{
+                    "items": [ChatModel.model_validate(chat) for chat in all_chats],
+                    "total": total,
+                }
+            )
 
     def get_pinned_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
         with get_db() as db:
diff --git a/backend/open_webui/routers/chats.py b/backend/open_webui/routers/chats.py
index 9d16ccb567..1b0433587e 100644
--- a/backend/open_webui/routers/chats.py
+++ b/backend/open_webui/routers/chats.py
@@ -8,6 +8,7 @@ from open_webui.socket.main import get_event_emitter
 from open_webui.models.chats import (
     ChatForm,
     ChatImportForm,
+    ChatUsageStatsListResponse,
     ChatsImportForm,
     ChatResponse,
     Chats,
@@ -68,16 +69,25 @@ def get_session_user_chat_list(
 
 
 ############################
-# GetChatList
+# GetChatUsageStats
+# EXPERIMENTAL: may be removed in future releases
 ############################
 
 
-@router.get("/stats/usage", response_model=list[ChatTitleIdResponse])
-def get_session_user_chat_usage(
+@router.get("/stats/usage", response_model=ChatUsageStatsListResponse)
+def get_session_user_chat_usage_stats(
+    items_per_page: Optional[int] = 50,
+    page: Optional[int] = 1,
     user=Depends(get_verified_user),
 ):
     try:
-        chats = Chats.get_chats_by_user_id(user.id)
+        limit = items_per_page
+        skip = (page - 1) * limit
+
+        result = Chats.get_chats_by_user_id(user.id, skip=skip, limit=limit)
+
+        chats = result.items
+        total = result.total
 
         chat_stats = []
         for chat in chats:
@@ -86,37 +96,96 @@
 
             if messages_map and message_id:
                 try:
+                    history_models = {}
+                    history_message_count = len(messages_map)
+                    history_user_messages = []
+                    history_assistant_messages = []
+
+                    for message in messages_map.values():
+                        if message.get("role", "") == "user":
+                            history_user_messages.append(message)
+                        elif message.get("role", "") == "assistant":
+                            history_assistant_messages.append(message)
+                        model = message.get("model", None)
+                        if model:
+                            if model not in history_models:
+                                history_models[model] = 0
+                            history_models[model] += 1
+
+                    average_user_message_content_length = (
+                        sum(
+                            len(message.get("content", ""))
+                            for message in history_user_messages
+                        )
+                        / len(history_user_messages)
+                        if len(history_user_messages) > 0
+                        else 0
+                    )
+                    average_assistant_message_content_length = (
+                        sum(
+                            len(message.get("content", ""))
+                            for message in history_assistant_messages
+                        )
+                        / len(history_assistant_messages)
+                        if len(history_assistant_messages) > 0
+                        else 0
+                    )
+
+                    response_times = []
+                    for message in history_assistant_messages:
+                        user_message_id = message.get("parentId", None)
+                        if user_message_id and user_message_id in messages_map:
+                            user_message = messages_map[user_message_id]
+                            response_time = message.get(
+                                "timestamp", 0
+                            ) - user_message.get("timestamp", 0)
+
+                            response_times.append(response_time)
+
+                    average_response_time = (
+                        sum(response_times) / len(response_times)
+                        if len(response_times) > 0
+                        else 0
+                    )
+
                     message_list = get_message_list(messages_map, message_id)
                     message_count = len(message_list)
 
-                    last_assistant_message = next(
-                        (
-                            message
-                            for message in reversed(message_list)
-                            if message["role"] == "assistant"
-                        ),
-                        None,
-                    )
+                    models = {}
+                    for message in reversed(message_list):
+                        if message.get("role") == "assistant":
+                            model = message.get("model", None)
+                            if model:
+                                if model not in models:
+                                    models[model] = 0
+                                models[model] += 1
+
+                    annotation = message.get("annotation", {})
 
-                    model_id = (
-                        last_assistant_message.get("model", None)
-                        if last_assistant_message
-                        else None
-                    )
                     chat_stats.append(
                         {
                             "id": chat.id,
-                            "model_id": model_id,
+                            "models": models,
                             "message_count": message_count,
+                            "history_models": history_models,
+                            "history_message_count": history_message_count,
+                            "history_user_message_count": len(history_user_messages),
+                            "history_assistant_message_count": len(
+                                history_assistant_messages
+                            ),
+                            "average_response_time": average_response_time,
+                            "average_user_message_content_length": average_user_message_content_length,
+                            "average_assistant_message_content_length": average_assistant_message_content_length,
                             "tags": chat.meta.get("tags", []),
-                            "model_ids": chat.chat.get("models", []),
+                            "last_message_at": message_list[-1].get("timestamp", None),
                             "updated_at": chat.updated_at,
                             "created_at": chat.created_at,
                         }
                     )
                 except Exception as e:
                     pass
-        return chat_stats
+
+        return ChatUsageStatsListResponse(items=chat_stats, total=total)
 
     except Exception as e:
         log.exception(e)
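
A minimal client-side sketch of the new paginated endpoint, assuming the chats router is mounted under /api/v1/chats as in upstream Open WebUI; BASE_URL and TOKEN below are placeholders rather than values defined in this patch, while the query parameters and response fields mirror get_session_user_chat_usage_stats and ChatUsageStatsListResponse above.

# Hypothetical usage sketch -- BASE_URL, TOKEN, and the /api/v1/chats prefix are
# assumptions; only the query parameters and response fields come from this patch.
import requests

BASE_URL = "http://localhost:8080/api/v1/chats"  # assumed mount point
TOKEN = "YOUR_API_KEY"  # placeholder bearer token

response = requests.get(
    f"{BASE_URL}/stats/usage",
    params={"page": 1, "items_per_page": 50},  # pagination added in this patch
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=30,
)
response.raise_for_status()

data = response.json()  # shaped like ChatUsageStatsListResponse: {"items": [...], "total": N}
for stats in data["items"]:
    # Each item follows ChatUsageStatsResponse (models/chats.py above).
    print(
        stats["id"],
        stats["message_count"],
        stats["history_message_count"],
        stats["average_response_time"],  # seconds, per the model's field comment
        stats["models"],
    )
print(data["total"], "chats total")

Note that total is computed with query.count() before offset/limit are applied, so it reflects the user's full chat count while items holds only the requested page.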