Mirror of https://github.com/open-webui/open-webui.git (synced 2026-01-02 22:55:20 +00:00)
enh: sync stats
This commit is contained in: parent 9c61e95ecb, commit 85bbed3ec5
4 changed files with 362 additions and 106 deletions
@@ -228,8 +228,7 @@ async def export_chat_stats(
     )
 
     try:
-        # Default pagination
-        limit = 50
+        limit = 100
         skip = (page - 1) * limit
 
         # Fetch chats with date filtering
@@ -255,121 +254,125 @@ async def export_chat_stats(
         chat_stats_export_list = []
 
         for chat in result.items:
-            messages_map = chat.chat.get("history", {}).get("messages", {})
-            message_id = chat.chat.get("history", {}).get("currentId")
-
-            history_models = {}
-            history_message_count = len(messages_map)
-            history_user_messages = []
-            history_assistant_messages = []
-
-            # --- Detailed Message Stats ---
-            export_messages = {}
-            for key, message in messages_map.items():
-                content = message.get("content", "")
-                if isinstance(content, str):
-                    content_length = len(content)
-                else:
-                    content_length = (
-                        0  # Handle cases where content might be None or not string
-                    )
-
-                # Extract rating safely
-                rating = message.get("annotation", {}).get("rating")
-
-                export_messages[key] = MessageStats(
-                    id=message.get("id"),
-                    role=message.get("role"),
-                    model=message.get("model"),
-                    timestamp=message.get("timestamp"),
-                    content_length=content_length,
-                    token_count=None,  # Populate if available, e.g. message.get("info", {}).get("token_count")
-                    rating=rating,
-                )
-
-                # --- Aggregation Logic (copied/adapted from usage stats) ---
-                role = message.get("role", "")
-                if role == "user":
-                    history_user_messages.append(message)
-                elif role == "assistant":
-                    history_assistant_messages.append(message)
-                    model = message.get("model")
-                    if model:
-                        if model not in history_models:
-                            history_models[model] = 0
-                        history_models[model] += 1
-
-            # Calculate Averages
-            average_user_message_content_length = (
-                sum(
-                    len(m.get("content", ""))
-                    for m in history_user_messages
-                    if isinstance(m.get("content"), str)
-                )
-                / len(history_user_messages)
-                if history_user_messages
-                else 0
-            )
-
-            average_assistant_message_content_length = (
-                sum(
-                    len(m.get("content", ""))
-                    for m in history_assistant_messages
-                    if isinstance(m.get("content"), str)
-                )
-                / len(history_assistant_messages)
-                if history_assistant_messages
-                else 0
-            )
-
-            # Response Times
-            response_times = []
-            for message in history_assistant_messages:
-                user_message_id = message.get("parentId", None)
-                if user_message_id and user_message_id in messages_map:
-                    user_message = messages_map[user_message_id]
-                    # Ensure timestamps exist
-                    t1 = message.get("timestamp")
-                    t0 = user_message.get("timestamp")
-                    if t1 and t0:
-                        response_times.append(t1 - t0)
-
-            average_response_time = (
-                sum(response_times) / len(response_times) if response_times else 0
-            )
-
-            # Current Message List Logic (Main path)
-            message_list = get_message_list(messages_map, message_id)
-            message_count = len(message_list)
-            models = {}
-            for message in reversed(message_list):
-                if message.get("role") == "assistant":
-                    model = message.get("model")
-                    if model:
-                        if model not in models:
-                            models[model] = 0
-                        models[model] += 1
-
-            # Construct Aggregate Stats
-            stats = AggregateChatStats(
-                average_response_time=average_response_time,
-                average_user_message_content_length=average_user_message_content_length,
-                average_assistant_message_content_length=average_assistant_message_content_length,
-                models=models,
-                message_count=message_count,
-                history_models=history_models,
-                history_message_count=history_message_count,
-                history_user_message_count=len(history_user_messages),
-                history_assistant_message_count=len(history_assistant_messages),
-            )
-
-            # Construct Chat Body
-            chat_body = ChatBody(
-                history=ChatHistoryStats(messages=export_messages, currentId=message_id)
-            )
-
-            chat_stats_export_list.append(
-                ChatStatsExport(
+            try:
+                messages_map = chat.chat.get("history", {}).get("messages", {})
+                message_id = chat.chat.get("history", {}).get("currentId")
+
+                history_models = {}
+                history_message_count = len(messages_map)
+                history_user_messages = []
+                history_assistant_messages = []
+
+                export_messages = {}
+                for key, message in messages_map.items():
+                    try:
+                        content = message.get("content", "")
+                        if isinstance(content, str):
+                            content_length = len(content)
+                        else:
+                            content_length = 0  # Handle cases where content might be None or not string
+
+                        # Extract rating safely
+                        rating = message.get("annotation", {}).get("rating")
+
+                        message_stat = MessageStats(
+                            id=message.get("id"),
+                            role=message.get("role"),
+                            model=message.get("model"),
+                            timestamp=message.get("timestamp"),
+                            content_length=content_length,
+                            token_count=None,  # Populate if available, e.g. message.get("info", {}).get("token_count")
+                            rating=rating,
+                        )
+
+                        export_messages[key] = message_stat
+
+                        # --- Aggregation Logic (copied/adapted from usage stats) ---
+                        role = message.get("role", "")
+                        if role == "user":
+                            history_user_messages.append(message)
+                        elif role == "assistant":
+                            history_assistant_messages.append(message)
+                            model = message.get("model")
+                            if model:
+                                if model not in history_models:
+                                    history_models[model] = 0
+                                history_models[model] += 1
+                    except Exception as e:
+                        log.debug(f"Error processing message {key}: {e}")
+                        continue
+
+                # Calculate Averages
+                average_user_message_content_length = (
+                    sum(
+                        len(m.get("content", ""))
+                        for m in history_user_messages
+                        if isinstance(m.get("content"), str)
+                    )
+                    / len(history_user_messages)
+                    if history_user_messages
+                    else 0
+                )
+
+                average_assistant_message_content_length = (
+                    sum(
+                        len(m.get("content", ""))
+                        for m in history_assistant_messages
+                        if isinstance(m.get("content"), str)
+                    )
+                    / len(history_assistant_messages)
+                    if history_assistant_messages
+                    else 0
+                )
+
+                # Response Times
+                response_times = []
+                for message in history_assistant_messages:
+                    user_message_id = message.get("parentId", None)
+                    if user_message_id and user_message_id in messages_map:
+                        user_message = messages_map[user_message_id]
+                        # Ensure timestamps exist
+                        t1 = message.get("timestamp")
+                        t0 = user_message.get("timestamp")
+                        if t1 and t0:
+                            response_times.append(t1 - t0)
+
+                average_response_time = (
+                    sum(response_times) / len(response_times) if response_times else 0
+                )
+
+                # Current Message List Logic (Main path)
+                message_list = get_message_list(messages_map, message_id)
+                message_count = len(message_list)
+                models = {}
+                for message in reversed(message_list):
+                    if message.get("role") == "assistant":
+                        model = message.get("model")
+                        if model:
+                            if model not in models:
+                                models[model] = 0
+                            models[model] += 1
+
+                # Construct Aggregate Stats
+                stats = AggregateChatStats(
+                    average_response_time=average_response_time,
+                    average_user_message_content_length=average_user_message_content_length,
+                    average_assistant_message_content_length=average_assistant_message_content_length,
+                    models=models,
+                    message_count=message_count,
+                    history_models=history_models,
+                    history_message_count=history_message_count,
+                    history_user_message_count=len(history_user_messages),
+                    history_assistant_message_count=len(history_assistant_messages),
+                )
+
+                # Construct Chat Body
+                chat_body = ChatBody(
+                    history=ChatHistoryStats(
+                        messages=export_messages, currentId=message_id
+                    )
+                )
+
+                chat_stat = ChatStatsExport(
                     id=chat.id,
                     user_id=chat.user_id,
                     created_at=chat.created_at,
@@ -378,7 +381,11 @@ async def export_chat_stats(
                     stats=stats,
                     chat=chat_body,
                 )
-            )
+
+                chat_stats_export_list.append(chat_stat)
+            except Exception as e:
+                log.debug(f"Error exporting stats for chat {chat.id}: {e}")
+                continue
 
         return ChatStatsExportList(
            items=chat_stats_export_list, total=result.total, page=page
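Not part of the commit, but for orientation: a TypeScript sketch of the per-page payload shape this export endpoint appears to return, inferred from the MessageStats, AggregateChatStats, ChatStatsExport and ChatStatsExportList constructors in the diff above. The field names come from the diff; the types, and any ChatStatsExport fields elided between the hunks, are assumptions.

// Approximate client-side view of the /chats/stats/export response,
// inferred from the constructor calls above (not the actual Pydantic models).
interface MessageStats {
	id: string;
	role: string;
	model: string | null;
	timestamp: number | null;
	content_length: number;
	token_count: number | null;
	rating: number | null;
}

interface AggregateChatStats {
	average_response_time: number;
	average_user_message_content_length: number;
	average_assistant_message_content_length: number;
	models: Record<string, number>; // model id -> count on the current message path
	message_count: number;
	history_models: Record<string, number>; // model id -> count across the whole history
	history_message_count: number;
	history_user_message_count: number;
	history_assistant_message_count: number;
}

interface ChatStatsExport {
	id: string;
	user_id: string;
	created_at: number;
	// fields elided between the two hunks above are intentionally omitted here
	stats: AggregateChatStats;
	chat: { history: { messages: Record<string, MessageStats>; currentId: string | null } };
}

interface ChatStatsExportList {
	items: ChatStatsExport[];
	total: number;
	page: number;
}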
@@ -1166,3 +1166,44 @@ export const archiveAllChats = async (token: string) => {
 
 	return res;
 };
+
+export const exportChatStats = async (token: string, page: number = 1, params: object = {}) => {
+	let error = null;
+
+	const searchParams = new URLSearchParams();
+	searchParams.append('page', `${page}`);
+
+	if (params) {
+		for (const [key, value] of Object.entries(params)) {
+			searchParams.append(key, `${value}`);
+		}
+	}
+
+	const res = await fetch(`${WEBUI_API_BASE_URL}/chats/stats/export?${searchParams.toString()}`, {
+		method: 'GET',
+		headers: {
+			Accept: 'application/json',
+			'Content-Type': 'application/json',
+			...(token && { authorization: `Bearer ${token}` })
+		}
+	})
+		.then(async (res) => {
+			if (!res.ok) throw await res.json();
+			return res.json();
+		})
+		.then((json) => {
+			return json;
+		})
+		.catch((err) => {
+			error = err;
+			console.error(err);
+			return null;
+		});
+
+	if (error) {
+		throw error;
+	}
+
+	return res;
+};
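Usage sketch, not part of the commit: a hypothetical helper that drains every page of the endpoint through this new API function, mirroring the loop SyncStatsModal uses below. The helper name and the onBatch callback are illustrative only.

import { exportChatStats } from '$lib/apis/chats';

// Hypothetical helper: fetch every page of chat stats and hand each batch
// to a callback until the server reports no more items.
const exportAllChatStats = async (
	token: string,
	onBatch: (batch: { items: unknown[]; total: number; page: number }) => void,
	params: object = {}
) => {
	let page = 1;
	let processed = 0;
	let done = false;

	while (!done) {
		const res = await exportChatStats(token, page, params).catch(() => null);
		if (!res) break;

		processed += res.items.length;
		onBatch(res);

		// Stop once everything reported by `total` has been seen, or a page comes back empty.
		done = processed >= res.total || res.items.length === 0;
		page += 1;
	}
};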
src/lib/components/chat/Settings/SyncStatsModal.svelte (new file, 174 lines)
@@ -0,0 +1,174 @@
<script lang="ts">
	import { toast } from 'svelte-sonner';
	import { onMount, getContext } from 'svelte';

	import { exportChatStats } from '$lib/apis/chats';
	import { Confetti } from 'svelte-confetti';
	import Check from '$lib/components/icons/Check.svelte';

	import Modal from '$lib/components/common/Modal.svelte';
	import XMark from '$lib/components/icons/XMark.svelte';
	import Spinner from '$lib/components/common/Spinner.svelte';

	const i18n = getContext('i18n');

	export let show = false;
	export let params = {};
	let loading = false;
	let completed = false;
	let processedItemsCount = 0;
	let total = 0;

	const syncStats = async () => {
		if (window.opener) {
			window.opener.focus();
		}

		loading = true;
		processedItemsCount = 0;
		total = 0;
		let page = 1;

		let allItemsLoaded = false;

		while (!allItemsLoaded) {
			const res = await exportChatStats(localStorage.token, page, params).catch(() => {
				return null;
			});

			if (res) {
				processedItemsCount += res.items.length;
				total = res.total;

				if (window.opener) {
					if (res.items.length > 0) {
						window.opener.postMessage({ type: 'export:stats:chats', data: res }, '*');
					}
				} else {
					console.log('No opener found to send stats back to.');
				}

				if (processedItemsCount >= total || res.items.length === 0) {
					allItemsLoaded = true;
				} else {
					page += 1;
				}
			} else {
				allItemsLoaded = true;
			}
		}

		loading = false;
		completed = true;
	};
</script>

<Modal bind:show size="md">
	<div class="w-full">
		{#if completed}
			<div class="flex flex-col items-center justify-center px-6 py-10">
				<Confetti x={[-0.5, 0.5]} y={[0.25, 1]} />

				<div class="rounded-full bg-green-100 dark:bg-green-900/30 p-3 mb-4">
					<Check className="size-8 text-green-600 dark:text-green-400" />
				</div>

				<div class="text-xl font-medium mb-2 text-gray-900 dark:text-gray-100">
					{$i18n.t('Sync Complete!')}
				</div>

				<div class="text-gray-500 dark:text-gray-400 text-center mb-6 max-w-sm text-xs">
					{$i18n.t('Your usage stats have been successfully synced with the Open WebUI Community.')}
				</div>

				<button
					class="px-6 py-1.5 rounded-full text-sm font-medium bg-gray-900 hover:bg-gray-800 text-white dark:bg-white dark:hover:bg-gray-100 dark:text-gray-900 transition-colors"
					on:click={() => (show = false)}
				>
					{$i18n.t('Done')}
				</button>
			</div>
		{:else}
			<div class=" flex justify-between px-5 pt-4 pb-0.5">
				<div class=" text-lg font-medium self-center">{$i18n.t('Sync Usage Stats')}</div>
				<button
					class="self-center"
					on:click={() => {
						show = false;
					}}
					disabled={loading}
				>
					<XMark className={'size-5'} />
				</button>
			</div>

			<div class="px-5 pt-2 pb-5">
				<div class="text-sm text-gray-500 dark:text-gray-400">
					{$i18n.t('Do you want to sync your usage stats with Open WebUI Community?')}
				</div>

				<div class="mt-2 text-xs text-gray-400 dark:text-gray-500">
					{$i18n.t(
						'Participate in community leaderboards and evaluations! Syncing aggregated usage stats helps drive research and improvements to Open WebUI. Your privacy is paramount: no message content is ever shared.'
					)}
				</div>

				<div class="mt-3 text-xs text-gray-400 dark:text-gray-500">
					<div class="font-medium text-gray-900 dark:text-gray-100 mb-1">
						{$i18n.t('What is shared:')}
					</div>
					<ul class="list-disc list-inside space-y-0.5 ml-1 mb-2">
						<li>{$i18n.t('Model usage counts and preferences')}</li>
						<li>{$i18n.t('Message counts and response timestamps')}</li>
						<li>{$i18n.t('Content lengths (character counts only)')}</li>
						<li>{$i18n.t('User ratings (thumbs up/down)')}</li>
					</ul>

					<div class="font-medium text-gray-900 dark:text-gray-100 mb-1">
						{$i18n.t('What is NOT shared:')}
					</div>
					<ul class="list-disc list-inside space-y-0.5 ml-1">
						<li>{$i18n.t('Your message text or inputs')}</li>
						<li>{$i18n.t('Model responses or outputs')}</li>
						<li>{$i18n.t('Uploaded files or images')}</li>
					</ul>
				</div>

				{#if loading}
					<div class="mt-3">
						<div class="text-xs text-gray-400 dark:text-gray-500 mb-1 flex justify-between">
							<div>{$i18n.t('Syncing stats...')}</div>
							<div>{Math.round((processedItemsCount / total) * 100) || 0}%</div>
						</div>
						<div class="w-full bg-gray-200 rounded-full h-1.5 dark:bg-gray-700">
							<div
								class="bg-gray-900 dark:bg-gray-100 h-1.5 rounded-full transition-all duration-300"
								style="width: {(processedItemsCount / total) * 100}%"
							></div>
						</div>
					</div>
				{/if}

				<div class="mt-5 flex justify-end gap-2">
					<button
						class="px-4 py-2 rounded-full text-sm font-medium bg-gray-100 hover:bg-gray-200 dark:bg-gray-800 dark:hover:bg-gray-700 text-gray-900 dark:text-gray-100 transition disabled:cursor-not-allowed"
						on:click={() => (show = false)}
						disabled={loading}
					>
						{$i18n.t('Cancel')}
					</button>
					<button
						class="px-4 py-2 rounded-full text-sm font-medium bg-black hover:bg-gray-900 dark:bg-white dark:hover:bg-gray-100 text-white dark:text-black transition flex items-center gap-2 disabled:cursor-not-allowed"
						on:click={syncStats}
						disabled={loading}
					>
						{#if loading}
							<Spinner className="size-4" />
						{/if}
						{$i18n.t('Sync')}
					</button>
				</div>
			</div>
		{/if}
	</div>
</Modal>
@@ -54,6 +54,7 @@
 
 	import NotificationToast from '$lib/components/NotificationToast.svelte';
 	import AppSidebar from '$lib/components/app/AppSidebar.svelte';
+	import SyncStatsModal from '$lib/components/chat/Settings/SyncStatsModal.svelte';
 	import Spinner from '$lib/components/common/Spinner.svelte';
 	import { getUserSettings } from '$lib/apis/users';
 	import dayjs from 'dayjs';
@@ -89,6 +90,8 @@
 	let tokenTimer = null;
 
 	let showRefresh = false;
+	let showSyncStatsModal = false;
+	let syncStatsParams = {};
 
 	let heartbeatInterval = null;
 
@@ -600,7 +603,24 @@
 		}
 	};
 
+	const windowMessageEventHandler = async (event) => {
+		if (
+			!['https://openwebui.com', 'https://www.openwebui.com', 'http://localhost:9999'].includes(
+				event.origin
+			)
+		) {
+			return;
+		}
+
+		if (event.data === 'export:stats' || event.data?.type === 'export:stats') {
+			syncStatsParams = event.data?.searchParams ?? {};
+			showSyncStatsModal = true;
+		}
+	};
+
 	onMount(async () => {
+		window.addEventListener('message', windowMessageEventHandler);
+
 		let touchstartY = 0;
 
 		function isNavOrDescendant(el) {
@@ -814,10 +834,20 @@
 			loaded = true;
 		}
 
+		// Notify opener window that the app has loaded
+		if (window.opener ?? false) {
+			window.opener.postMessage('loaded', '*');
+		}
+
 		return () => {
 			window.removeEventListener('resize', onResize);
 		};
 	});
 
+	onDestroy(() => {
+		window.removeEventListener('message', windowMessageEventHandler);
+		bc.close();
+	});
+
 </script>
 
 <svelte:head>
@@ -855,6 +885,10 @@
 		{/if}
 	{/if}
 
+	{#if $config?.features.enable_community_sharing}
+		<SyncStatsModal bind:show={showSyncStatsModal} params={syncStatsParams} />
+	{/if}
+
 	<Toaster
 		theme={$theme.includes('dark')
 			? 'dark'
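The receiving side of this handshake is not part of the commit. As a hypothetical sketch, based only on the messages visible in SyncStatsModal and the layout changes above, an opener page (such as the community site) would open the WebUI instance, wait for its 'loaded' message, post 'export:stats', and collect the 'export:stats:chats' batches. The requestStatsSync name, the onBatch callback, and the origin handling below are assumptions, not code from this repository.

// Hypothetical opener-side counterpart: ask an opened WebUI window to export
// stats and collect the pages it posts back via postMessage.
const requestStatsSync = (
	webuiOrigin: string,
	onBatch: (batch: unknown) => void,
	searchParams: object = {}
) => {
	const popup = window.open(webuiOrigin, '_blank');

	const onMessage = (event: MessageEvent) => {
		// Assumes webuiOrigin is a bare origin (scheme://host[:port]).
		if (event.origin !== webuiOrigin) return;

		if (event.data === 'loaded') {
			// The layout posts 'loaded' once mounted; reply with the export request
			// that windowMessageEventHandler listens for.
			popup?.postMessage({ type: 'export:stats', searchParams }, webuiOrigin);
		} else if (event.data?.type === 'export:stats:chats') {
			// Each batch is one ChatStatsExportList page posted by SyncStatsModal.
			onBatch(event.data.data);
		}
	};

	window.addEventListener('message', onMessage);
};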