open-webui/src/lib/components/chat/Chat.svelte

<script lang="ts">
import { v4 as uuidv4 } from 'uuid';
import { toast } from 'svelte-sonner';
import { PaneGroup, Pane, PaneResizer } from 'paneforge';
import { getContext, onDestroy, onMount, tick } from 'svelte';
import { goto } from '$app/navigation';
import { page } from '$app/stores';
import { get, type Unsubscriber, type Writable } from 'svelte/store';
import type { i18n as i18nType } from 'i18next';
import { WEBUI_API_BASE_URL, WEBUI_BASE_URL } from '$lib/constants';
import {
chatId,
chats,
config,
type Model,
models,
userModels,
tags as allTags,
settings,
showSidebar,
WEBUI_NAME,
banners,
user,
socket,
showControls,
showCallOverlay,
currentChatPage,
temporaryChatEnabled,
mobile,
showOverview,
chatTitle,
showArtifacts,
tools,
toolServers,
functions,
selectedFolder,
pinnedChats,
showEmbeds
} from '$lib/stores';
import {
convertMessagesToHistory,
copyToClipboard,
getMessageContentParts,
createMessagesList,
getPromptVariables,
processDetails,
removeAllDetails
} from '$lib/utils';
import {
createNewChat,
getAllTags,
getChatById,
getChatList,
getPinnedChatList,
getTagsById,
updateChatById,
updateChatFolderIdById
} from '$lib/apis/chats';
import { generateOpenAIChatCompletion } from '$lib/apis/openai';
import { processWeb, processWebSearch, processYoutubeVideo } from '$lib/apis/retrieval';
import { getAndUpdateUserLocation, getUserSettings } from '$lib/apis/users';
import {
chatCompleted,
generateQueries,
chatAction,
generateMoACompletion,
stopTask,
getTaskIdsByChatId
} from '$lib/apis';
import { getTools } from '$lib/apis/tools';
import { uploadFile } from '$lib/apis/files';
import { createOpenAITextStream } from '$lib/apis/streaming';
import { fade } from 'svelte/transition';
import Banner from '../common/Banner.svelte';
import MessageInput from '$lib/components/chat/MessageInput.svelte';
import Messages from '$lib/components/chat/Messages.svelte';
import Navbar from '$lib/components/chat/Navbar.svelte';
import ChatControls from './ChatControls.svelte';
import EventConfirmDialog from '../common/ConfirmDialog.svelte';
import Placeholder from './Placeholder.svelte';
import NotificationToast from '../NotificationToast.svelte';
import Spinner from '../common/Spinner.svelte';
import Tooltip from '../common/Tooltip.svelte';
import Sidebar from '../icons/Sidebar.svelte';
import { getFunctions } from '$lib/apis/functions';
import Image from '../common/Image.svelte';
import { updateFolderById } from '$lib/apis/folders';
const i18n: Writable<i18nType> = getContext('i18n');
export let chatIdProp = '';
let loading = true;
const eventTarget = new EventTarget();
let controlPane;
let controlPaneComponent;
let messageInput;
let autoScroll = true;
let processing = '';
let messagesContainerElement: HTMLDivElement;
let navbarElement;
let showEventConfirmation = false;
let eventConfirmationTitle = '';
let eventConfirmationMessage = '';
let eventConfirmationInput = false;
let eventConfirmationInputPlaceholder = '';
let eventConfirmationInputValue = '';
let eventCallback = null;
let chatIdUnsubscriber: Unsubscriber | undefined;
let selectedModels = [''];
let atSelectedModel: Model | undefined;
let selectedModelIds = [];
$: selectedModelIds = atSelectedModel !== undefined ? [atSelectedModel.id] : selectedModels;
let selectedToolIds = [];
let selectedFilterIds = [];
let imageGenerationEnabled = false;
let webSearchEnabled = false;
let codeInterpreterEnabled = false;
let memoryEnabled = true;
let memoryLocked = false;
let showCommands = false;
let generating = false;
let generationController = null;
let chat = null;
let tags = [];
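// The chat history is stored as a message tree rather than a flat list so that
// conversations can branch (see the parentId/childrenIds handling below).
// Illustrative shape, inferred from how messages are created in this file:
// history = {
// 	currentId: 'b2',
// 	messages: {
// 		a1: { id: 'a1', parentId: null, childrenIds: ['b2'], role: 'user', content: 'Hi' },
// 		b2: { id: 'b2', parentId: 'a1', childrenIds: [], role: 'assistant', content: 'Hello', done: true }
// 	}
// };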
let history = {
messages: {},
currentId: null
};
let taskIds = null;
// Chat Input
let prompt = '';
let chatFiles = [];
let files = [];
let params = {};
$: if (chatIdProp) {
navigateHandler();
}
const navigateHandler = async () => {
loading = true;
prompt = '';
messageInput?.setText('');
files = [];
selectedToolIds = [];
selectedFilterIds = [];
webSearchEnabled = false;
imageGenerationEnabled = false;
memoryEnabled = true;
memoryLocked = false;
const storageChatInput = sessionStorage.getItem(
`chat-input${chatIdProp ? `-${chatIdProp}` : ''}`
);
if (chatIdProp && (await loadChat())) {
await tick();
loading = false;
window.setTimeout(() => scrollToBottom(), 0);
await tick();
if (storageChatInput) {
try {
const input = JSON.parse(storageChatInput);
if (!$temporaryChatEnabled) {
messageInput?.setText(input.prompt);
files = input.files;
selectedToolIds = input.selectedToolIds;
selectedFilterIds = input.selectedFilterIds;
webSearchEnabled = input.webSearchEnabled;
imageGenerationEnabled = input.imageGenerationEnabled;
codeInterpreterEnabled = input.codeInterpreterEnabled;
if (!memoryLocked) {
memoryEnabled = input.memoryEnabled;
}
}
} catch (e) {}
}
const chatInput = document.getElementById('chat-input');
chatInput?.focus();
} else {
await goto('/');
}
};
const onSelect = async (e) => {
const { type, data } = e;
if (type === 'prompt') {
// Handle prompt selection
messageInput?.setText(data, async () => {
if (!($settings?.insertSuggestionPrompt ?? false)) {
await tick();
submitPrompt(prompt);
}
});
}
};
$: if (selectedModels && chatIdProp !== '') {
saveSessionSelectedModels();
}
const saveSessionSelectedModels = () => {
const selectedModelsString = JSON.stringify(selectedModels);
if (
selectedModels.length === 0 ||
(selectedModels.length === 1 && selectedModels[0] === '') ||
sessionStorage.selectedModels === selectedModelsString
) {
return;
}
sessionStorage.selectedModels = selectedModelsString;
console.log('saveSessionSelectedModels', selectedModels, sessionStorage.selectedModels);
};
let oldSelectedModelIds = [''];
$: if (JSON.stringify(selectedModelIds) !== JSON.stringify(oldSelectedModelIds)) {
onSelectedModelIdsChange();
}
const onSelectedModelIdsChange = () => {
if (oldSelectedModelIds.filter((id) => id).length > 0) {
resetInput();
}
oldSelectedModelIds = selectedModelIds;
};
const resetInput = () => {
selectedToolIds = [];
selectedFilterIds = [];
webSearchEnabled = false;
imageGenerationEnabled = false;
codeInterpreterEnabled = false;
memoryEnabled = true;
setDefaults();
};
const setDefaults = async () => {
if (!$tools) {
tools.set(await getTools(localStorage.token));
}
if (!$functions) {
functions.set(await getFunctions(localStorage.token));
}
if (selectedModels.length !== 1 && !atSelectedModel) {
return;
}
const model = atSelectedModel ?? $models.find((m) => m.id === selectedModels[0]);
if (model) {
// Set Default Tools
if (model?.info?.meta?.toolIds) {
selectedToolIds = [
...new Set(
[...(model?.info?.meta?.toolIds ?? [])].filter((id) => $tools.find((t) => t.id === id))
)
];
}
// Set Default Filters (Toggleable only)
if (model?.info?.meta?.defaultFilterIds) {
selectedFilterIds = model.info.meta.defaultFilterIds.filter((id) =>
model?.filters?.find((f) => f.id === id)
);
}
// Set Default Features
if (model?.info?.meta?.defaultFeatureIds) {
if (model.info?.meta?.capabilities?.['image_generation']) {
imageGenerationEnabled = model.info.meta.defaultFeatureIds.includes('image_generation');
}
if (model.info?.meta?.capabilities?.['web_search']) {
webSearchEnabled = model.info.meta.defaultFeatureIds.includes('web_search');
}
if (model.info?.meta?.capabilities?.['code_interpreter']) {
codeInterpreterEnabled = model.info.meta.defaultFeatureIds.includes('code_interpreter');
}
if (model.info?.meta?.capabilities?.['memory']) {
memoryEnabled = model.info.meta.defaultFeatureIds.includes('memory');
}
}
}
};
const showMessage = async (message, ignoreSettings = false) => {
await tick();
const _chatId = JSON.parse(JSON.stringify($chatId));
let _messageId = JSON.parse(JSON.stringify(message.id));
let messageChildrenIds = [];
if (_messageId === null) {
messageChildrenIds = Object.keys(history.messages).filter(
(id) => history.messages[id].parentId === null
);
} else {
messageChildrenIds = history.messages[_messageId].childrenIds;
}
while (messageChildrenIds.length !== 0) {
_messageId = messageChildrenIds.at(-1);
messageChildrenIds = history.messages[_messageId].childrenIds;
}
history.currentId = _messageId;
await tick();
await tick();
await tick();
if (($settings?.scrollOnBranchChange ?? true) || ignoreSettings) {
const messageElement = document.getElementById(`message-${message.id}`);
if (messageElement) {
messageElement.scrollIntoView({ behavior: 'smooth' });
}
}
await tick();
saveChatHandler(_chatId, history);
};
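// Socket 'events' payloads handled below. Illustrative shape, inferred from the
// handlers in chatEventHandler (not an exhaustive schema):
// {
// 	chat_id: '...',
// 	message_id: '...',
// 	data: { type: 'chat:completion' | 'status' | 'chat:message:delta' | 'chat:title' | ..., data: { ... } }
// }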
const chatEventHandler = async (event, cb) => {
console.log(event);
if (event.chat_id === $chatId) {
await tick();
let message = history.messages[event.message_id];
if (message) {
const type = event?.data?.type ?? null;
const data = event?.data?.data ?? null;
if (type === 'status') {
if (message?.statusHistory) {
message.statusHistory.push(data);
} else {
message.statusHistory = [data];
}
} else if (type === 'chat:completion') {
chatCompletionEventHandler(data, message, event.chat_id);
} else if (type === 'chat:tasks:cancel') {
taskIds = null;
const responseMessage = history.messages[history.currentId];
// Set all response messages to done
for (const messageId of history.messages[responseMessage.parentId].childrenIds) {
history.messages[messageId].done = true;
}
} else if (type === 'chat:message:delta' || type === 'message') {
message.content += data.content;
} else if (type === 'chat:message' || type === 'replace') {
message.content = data.content;
} else if (type === 'chat:message:files' || type === 'files') {
message.files = data.files;
} else if (type === 'chat:message:embeds' || type === 'embeds') {
message.embeds = data.embeds;
} else if (type === 'chat:message:error') {
message.error = data.error;
} else if (type === 'chat:message:follow_ups') {
message.followUps = data.follow_ups;
if (autoScroll) {
scrollToBottom('smooth');
}
} else if (type === 'chat:title') {
chatTitle.set(data);
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
} else if (type === 'chat:tags') {
chat = await getChatById(localStorage.token, $chatId);
allTags.set(await getAllTags(localStorage.token));
} else if (type === 'source' || type === 'citation') {
if (data?.type === 'code_execution') {
// Code execution; update existing code execution by ID, or add new one.
if (!message?.code_executions) {
message.code_executions = [];
}
const existingCodeExecutionIndex = message.code_executions.findIndex(
(execution) => execution.id === data.id
);
if (existingCodeExecutionIndex !== -1) {
message.code_executions[existingCodeExecutionIndex] = data;
} else {
message.code_executions.push(data);
}
message.code_executions = message.code_executions;
} else {
// Regular source.
if (message?.sources) {
message.sources.push(data);
} else {
message.sources = [data];
}
}
} else if (type === 'notification') {
const toastType = data?.type ?? 'info';
const toastContent = data?.content ?? '';
if (toastType === 'success') {
toast.success(toastContent);
} else if (toastType === 'error') {
toast.error(toastContent);
} else if (toastType === 'warning') {
toast.warning(toastContent);
} else {
toast.info(toastContent);
}
} else if (type === 'confirmation') {
eventCallback = cb;
eventConfirmationInput = false;
showEventConfirmation = true;
eventConfirmationTitle = data.title;
eventConfirmationMessage = data.message;
} else if (type === 'execute') {
eventCallback = cb;
try {
// Evaluate the provided code via the Function constructor (avoids a direct eval call)
const asyncFunction = new Function(`return (async () => { ${data.code} })()`);
const result = await asyncFunction(); // Await the result of the async function
if (cb) {
cb(result);
}
} catch (error) {
console.error('Error executing code:', error);
}
} else if (type === 'input') {
eventCallback = cb;
eventConfirmationInput = true;
showEventConfirmation = true;
eventConfirmationTitle = data.title;
eventConfirmationMessage = data.message;
eventConfirmationInputPlaceholder = data.placeholder;
eventConfirmationInputValue = data?.value ?? '';
} else {
console.log('Unknown message type', data);
}
history.messages[event.message_id] = message;
}
}
};
const onMessageHandler = async (event: {
origin: string;
data: { type: string; text: string };
}) => {
if (event.origin !== window.origin) {
return;
}
if (event.data.type === 'action:submit') {
console.debug(event.data.text);
if (prompt !== '') {
await tick();
submitPrompt(prompt);
}
}
// Note: adjust the origin check above if messages are expected from a different iframe origin
if (event.data.type === 'input:prompt') {
console.debug(event.data.text);
const inputElement = document.getElementById('chat-input');
if (inputElement) {
messageInput?.setText(event.data.text);
inputElement.focus();
}
}
if (event.data.type === 'input:prompt:submit') {
console.debug(event.data.text);
if (event.data.text !== '') {
await tick();
submitPrompt(event.data.text);
}
}
};
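// Example (illustrative): a same-origin embedder could pre-fill and submit a prompt via
// chatFrame.contentWindow.postMessage({ type: 'input:prompt:submit', text: 'Summarize this page' }, window.origin);
// where chatFrame is assumed to be the <iframe> hosting this component; 'input:prompt'
// only fills the input, 'action:submit' submits whatever is already typed.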
const savedModelIds = async () => {
if (
$selectedFolder &&
selectedModels.filter((modelId) => modelId !== '').length > 0 &&
JSON.stringify($selectedFolder?.data?.model_ids) !== JSON.stringify(selectedModels)
) {
const res = await updateFolderById(localStorage.token, $selectedFolder.id, {
data: {
model_ids: selectedModels
}
});
}
};
$: if (selectedModels !== null) {
savedModelIds();
}
let pageSubscribe = null;
let showControlsSubscribe = null;
let selectedFolderSubscribe = null;
onMount(async () => {
loading = true;
console.log('mounted');
window.addEventListener('message', onMessageHandler);
$socket?.on('events', chatEventHandler);
pageSubscribe = page.subscribe(async (p) => {
if (p.url.pathname === '/') {
await tick();
initNewChat();
}
});
const storageChatInput = sessionStorage.getItem(
`chat-input${chatIdProp ? `-${chatIdProp}` : ''}`
);
if (!chatIdProp) {
loading = false;
await tick();
}
if (storageChatInput) {
prompt = '';
messageInput?.setText('');
files = [];
selectedToolIds = [];
selectedFilterIds = [];
webSearchEnabled = false;
imageGenerationEnabled = false;
codeInterpreterEnabled = false;
memoryEnabled = true;
try {
const input = JSON.parse(storageChatInput);
if (!$temporaryChatEnabled) {
messageInput?.setText(input.prompt);
files = input.files;
selectedToolIds = input.selectedToolIds;
selectedFilterIds = input.selectedFilterIds;
webSearchEnabled = input.webSearchEnabled;
imageGenerationEnabled = input.imageGenerationEnabled;
codeInterpreterEnabled = input.codeInterpreterEnabled;
memoryEnabled = input.memoryEnabled;
}
} catch (e) {}
}
showControlsSubscribe = showControls.subscribe(async (value) => {
if (controlPane && !$mobile) {
try {
if (value) {
controlPaneComponent.openPane();
} else {
controlPane.collapse();
}
} catch (e) {
// ignore
}
}
if (!value) {
showCallOverlay.set(false);
showOverview.set(false);
showArtifacts.set(false);
showEmbeds.set(false);
}
});
selectedFolderSubscribe = selectedFolder.subscribe(async (folder) => {
if (
folder?.data?.model_ids &&
JSON.stringify(selectedModels) !== JSON.stringify(folder.data.model_ids)
) {
selectedModels = folder.data.model_ids;
console.log('Set selectedModels from folder data:', selectedModels);
}
});
const chatInput = document.getElementById('chat-input');
chatInput?.focus();
});
onDestroy(() => {
try {
pageSubscribe();
showControlsSubscribe();
selectedFolderSubscribe();
chatIdUnsubscriber?.();
window.removeEventListener('message', onMessageHandler);
$socket?.off('events', chatEventHandler);
} catch (e) {
console.error(e);
}
});
// File upload functions
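// Expected fileData shape for uploadGoogleDriveFile (per the validation below); the
// Authorization header carries the access token used to fetch the file content:
// { id: string, name: string, url: string, headers: { Authorization: 'Bearer <token>' } }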
const uploadGoogleDriveFile = async (fileData) => {
console.log('Starting uploadGoogleDriveFile with:', {
id: fileData.id,
name: fileData.name,
url: fileData.url,
headers: {
Authorization: fileData.headers?.Authorization
}
});
// Validate input
if (!fileData?.id || !fileData?.name || !fileData?.url || !fileData?.headers?.Authorization) {
throw new Error('Invalid file data provided');
}
const tempItemId = uuidv4();
const fileItem = {
type: 'file',
file: '',
id: null,
url: fileData.url,
name: fileData.name,
collection_name: '',
status: 'uploading',
error: '',
itemId: tempItemId,
size: 0
};
try {
files = [...files, fileItem];
console.log('Processing web file with URL:', fileData.url);
// Configure fetch options with proper headers
const fetchOptions = {
headers: {
Authorization: fileData.headers.Authorization,
Accept: '*/*'
},
method: 'GET'
};
// Attempt to fetch the file
console.log('Fetching file content from Google Drive...');
const fileResponse = await fetch(fileData.url, fetchOptions);
if (!fileResponse.ok) {
const errorText = await fileResponse.text();
throw new Error(`Failed to fetch file (${fileResponse.status}): ${errorText}`);
}
// Get content type from response
const contentType = fileResponse.headers.get('content-type') || 'application/octet-stream';
console.log('Response received with content-type:', contentType);
// Convert response to blob
console.log('Converting response to blob...');
const fileBlob = await fileResponse.blob();
if (fileBlob.size === 0) {
throw new Error('Retrieved file is empty');
}
console.log('Blob created:', {
size: fileBlob.size,
type: fileBlob.type || contentType
});
// Create File object with proper MIME type
const file = new File([fileBlob], fileData.name, {
type: fileBlob.type || contentType
});
console.log('File object created:', {
name: file.name,
size: file.size,
type: file.type
});
if (file.size === 0) {
throw new Error('Created file is empty');
}
// If the file is an audio or video file, provide the language for STT.
let metadata = null;
if (
(file.type.startsWith('audio/') || file.type.startsWith('video/')) &&
$settings?.audio?.stt?.language
) {
metadata = {
language: $settings?.audio?.stt?.language
};
}
// Upload file to server
console.log('Uploading file to server...');
const uploadedFile = await uploadFile(localStorage.token, file, metadata);
if (!uploadedFile) {
throw new Error('Server returned null response for file upload');
}
console.log('File uploaded successfully:', uploadedFile);
// Update file item with upload results
fileItem.status = 'uploaded';
fileItem.file = uploadedFile;
fileItem.id = uploadedFile.id;
fileItem.size = file.size;
fileItem.collection_name = uploadedFile?.meta?.collection_name;
fileItem.url = `${WEBUI_API_BASE_URL}/files/${uploadedFile.id}`;
files = files;
toast.success($i18n.t('File uploaded successfully'));
} catch (e) {
console.error('Error uploading file:', e);
files = files.filter((f) => f.itemId !== tempItemId);
toast.error(
$i18n.t('Error uploading file: {{error}}', {
error: e.message || 'Unknown error'
})
);
}
};
const uploadWeb = async (url) => {
console.log(url);
const fileItem = {
type: 'text',
name: url,
collection_name: '',
status: 'uploading',
url: url,
error: ''
};
try {
files = [...files, fileItem];
const res = await processWeb(localStorage.token, '', url);
if (res) {
fileItem.status = 'uploaded';
fileItem.collection_name = res.collection_name;
fileItem.file = {
...res.file,
...fileItem.file
};
files = files;
}
} catch (e) {
// Remove the failed doc from the files array
files = files.filter((f) => f.name !== url);
toast.error(JSON.stringify(e));
}
};
const uploadYoutubeTranscription = async (url) => {
console.log(url);
const fileItem = {
type: 'text',
name: url,
collection_name: '',
status: 'uploading',
context: 'full',
url: url,
error: ''
};
try {
files = [...files, fileItem];
const res = await processYoutubeVideo(localStorage.token, url);
if (res) {
fileItem.status = 'uploaded';
fileItem.collection_name = res.collection_name;
fileItem.file = {
...res.file,
...fileItem.file
};
files = files;
}
} catch (e) {
// Remove the failed doc from the files array
files = files.filter((f) => f.name !== url);
toast.error(`${e}`);
}
};
//////////////////////////
// Web functions
//////////////////////////
const initNewChat = async () => {
console.log('initNewChat');
if ($user?.role !== 'admin' && $user?.permissions?.chat?.temporary_enforced) {
await temporaryChatEnabled.set(true);
}
if ($settings?.temporaryChatByDefault ?? false) {
if ($temporaryChatEnabled === false) {
await temporaryChatEnabled.set(true);
} else if ($temporaryChatEnabled === null) {
// if set to null, set it to false; see the temporary chat toggle click handler
await temporaryChatEnabled.set(false);
}
}
const availableModels = $models
.filter((m) => !(m?.info?.meta?.hidden ?? false))
.map((m) => m.id);
if ($page.url.searchParams.get('models') || $page.url.searchParams.get('model')) {
const urlModels = (
$page.url.searchParams.get('models') ||
$page.url.searchParams.get('model') ||
''
)?.split(',');
if (urlModels.length === 1) {
const m = $models.find((m) => m.id === urlModels[0]);
if (!m) {
const modelSelectorButton = document.getElementById('model-selector-0-button');
if (modelSelectorButton) {
modelSelectorButton.click();
await tick();
const modelSelectorInput = document.getElementById('model-search-input');
if (modelSelectorInput) {
modelSelectorInput.focus();
modelSelectorInput.value = urlModels[0];
modelSelectorInput.dispatchEvent(new Event('input'));
}
}
} else {
selectedModels = urlModels;
}
} else {
selectedModels = urlModels;
}
selectedModels = selectedModels.filter((modelId) =>
$models.map((m) => m.id).includes(modelId)
);
} else {
if ($selectedFolder?.data?.model_ids) {
selectedModels = $selectedFolder?.data?.model_ids;
} else {
if (sessionStorage.selectedModels) {
selectedModels = JSON.parse(sessionStorage.selectedModels);
sessionStorage.removeItem('selectedModels');
} else {
if ($settings?.models) {
selectedModels = $settings?.models;
} else if ($config?.default_models) {
console.log($config?.default_models.split(',') ?? '');
selectedModels = $config?.default_models.split(',');
}
}
}
selectedModels = selectedModels.filter((modelId) => availableModels.includes(modelId));
}
if (selectedModels.length === 0 || (selectedModels.length === 1 && selectedModels[0] === '')) {
if (availableModels.length > 0) {
selectedModels = [availableModels?.at(0) ?? ''];
} else {
selectedModels = [''];
}
}
await showControls.set(false);
await showCallOverlay.set(false);
await showOverview.set(false);
await showArtifacts.set(false);
if ($page.url.pathname.includes('/c/')) {
window.history.replaceState(history.state, '', `/`);
}
autoScroll = true;
resetInput();
await chatId.set('');
await chatTitle.set('');
history = {
messages: {},
currentId: null
};
chatFiles = [];
params = {};
if ($page.url.searchParams.get('youtube')) {
uploadYoutubeTranscription(
`https://www.youtube.com/watch?v=${$page.url.searchParams.get('youtube')}`
);
}
if ($page.url.searchParams.get('load-url')) {
await uploadWeb($page.url.searchParams.get('load-url'));
}
if ($page.url.searchParams.get('web-search') === 'true') {
webSearchEnabled = true;
}
if ($page.url.searchParams.get('image-generation') === 'true') {
imageGenerationEnabled = true;
}
if ($page.url.searchParams.get('code-interpreter') === 'true') {
codeInterpreterEnabled = true;
}
if ($page.url.searchParams.get('tools')) {
selectedToolIds = $page.url.searchParams
.get('tools')
?.split(',')
.map((id) => id.trim())
.filter((id) => id);
} else if ($page.url.searchParams.get('tool-ids')) {
selectedToolIds = $page.url.searchParams
.get('tool-ids')
?.split(',')
.map((id) => id.trim())
.filter((id) => id);
}
if ($page.url.searchParams.get('call') === 'true') {
showCallOverlay.set(true);
showControls.set(true);
}
if ($page.url.searchParams.get('q')) {
const q = $page.url.searchParams.get('q') ?? '';
messageInput?.setText(q);
if (q) {
if (($page.url.searchParams.get('submit') ?? 'true') === 'true') {
await tick();
submitPrompt(q);
}
}
}
selectedModels = selectedModels.map((modelId) =>
$models.map((m) => m.id).includes(modelId) ? modelId : ''
);
const userSettings = await getUserSettings(localStorage.token);
if (userSettings) {
settings.set(userSettings.ui);
} else {
settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
}
const chatInput = document.getElementById('chat-input');
setTimeout(() => chatInput?.focus(), 0);
};
const loadChat = async () => {
chatId.set(chatIdProp);
if ($temporaryChatEnabled) {
temporaryChatEnabled.set(false);
}
chat = await getChatById(localStorage.token, $chatId).catch(async (error) => {
await goto('/');
return null;
});
if (chat) {
tags = await getTagsById(localStorage.token, $chatId).catch(async (error) => {
return [];
});
const chatContent = chat.chat;
if (chatContent) {
console.log(chatContent);
selectedModels =
(chatContent?.models ?? undefined) !== undefined
? chatContent.models
: [chatContent.models ?? ''];
if (!($user?.role === 'admin' || ($user?.permissions?.chat?.multiple_models ?? true))) {
selectedModels = selectedModels.length > 0 ? [selectedModels[0]] : [''];
}
oldSelectedModelIds = selectedModels;
history =
(chatContent?.history ?? undefined) !== undefined
? chatContent.history
: convertMessagesToHistory(chatContent.messages);
chatTitle.set(chatContent.title);
const userSettings = await getUserSettings(localStorage.token);
if (userSettings) {
await settings.set(userSettings.ui);
} else {
await settings.set(JSON.parse(localStorage.getItem('settings') ?? '{}'));
}
params = chatContent?.params ?? {};
chatFiles = chatContent?.files ?? [];
if (chatContent?.memory_enabled !== undefined) {
memoryEnabled = chatContent.memory_enabled;
}
autoScroll = true;
await tick();
if (history.currentId) {
for (const message of Object.values(history.messages)) {
if (message.role === 'assistant') {
message.done = true;
}
}
}
memoryLocked = Object.keys(history?.messages ?? {}).length > 0;
const taskRes = await getTaskIdsByChatId(localStorage.token, $chatId).catch((error) => {
return null;
});
if (taskRes) {
taskIds = taskRes.task_ids;
}
await tick();
return true;
} else {
return null;
}
}
};
const scrollToBottom = async (behavior = 'auto') => {
await tick();
if (messagesContainerElement) {
messagesContainerElement.scrollTo({
top: messagesContainerElement.scrollHeight,
behavior
});
}
};
const chatCompletedHandler = async (chatId, modelId, responseMessageId, messages) => {
const res = await chatCompleted(localStorage.token, {
model: modelId,
messages: messages.map((m) => ({
id: m.id,
role: m.role,
content: m.content,
info: m.info ? m.info : undefined,
timestamp: m.timestamp,
...(m.usage ? { usage: m.usage } : {}),
...(m.sources ? { sources: m.sources } : {})
})),
filter_ids: selectedFilterIds.length > 0 ? selectedFilterIds : undefined,
model_item: $models.find((m) => m.id === modelId),
chat_id: chatId,
session_id: $socket?.id,
id: responseMessageId
}).catch((error) => {
toast.error(`${error}`);
messages.at(-1).error = { content: error };
return null;
});
if (res !== null && res.messages) {
// Update chat history with the new messages
for (const message of res.messages) {
if (message?.id) {
// Add null check for message and message.id
history.messages[message.id] = {
...history.messages[message.id],
...(history.messages[message.id].content !== message.content
? { originalContent: history.messages[message.id].content }
: {}),
...message
};
}
}
}
await tick();
if ($chatId == chatId) {
if (!$temporaryChatEnabled) {
chat = await updateChatById(localStorage.token, chatId, {
models: selectedModels,
messages: messages,
history: history,
params: params,
files: chatFiles
});
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
}
}
taskIds = null;
};
const chatActionHandler = async (chatId, actionId, modelId, responseMessageId, event = null) => {
const messages = createMessagesList(history, responseMessageId);
const res = await chatAction(localStorage.token, actionId, {
model: modelId,
messages: messages.map((m) => ({
id: m.id,
role: m.role,
content: m.content,
info: m.info ? m.info : undefined,
timestamp: m.timestamp,
...(m.sources ? { sources: m.sources } : {})
})),
...(event ? { event: event } : {}),
model_item: $models.find((m) => m.id === modelId),
chat_id: chatId,
session_id: $socket?.id,
id: responseMessageId
}).catch((error) => {
toast.error(`${error}`);
messages.at(-1).error = { content: error };
return null;
});
if (res !== null && res.messages) {
// Update chat history with the new messages
for (const message of res.messages) {
history.messages[message.id] = {
...history.messages[message.id],
...(history.messages[message.id].content !== message.content
? { originalContent: history.messages[message.id].content }
: {}),
...message
};
}
}
if ($chatId == chatId) {
if (!$temporaryChatEnabled) {
chat = await updateChatById(localStorage.token, chatId, {
models: selectedModels,
messages: messages,
history: history,
params: params,
files: chatFiles
});
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
}
}
};
const getChatEventEmitter = async (modelId: string, chatId: string = '') => {
return setInterval(() => {
$socket?.emit('usage', {
action: 'chat',
model: modelId,
chat_id: chatId
});
}, 1000);
};
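// Note: getChatEventEmitter returns the interval id from setInterval; callers
// (see sendMessage, step 7.5) stop the usage heartbeat with clearInterval once the
// completion request has finished.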
const createMessagePair = async (userPrompt) => {
messageInput?.setText('');
if (selectedModels.length === 0) {
toast.error($i18n.t('Model not selected'));
} else {
const modelId = selectedModels[0];
const model = $models.filter((m) => m.id === modelId).at(0);
const messages = createMessagesList(history, history.currentId);
const parentMessage = messages.length !== 0 ? messages.at(-1) : null;
const userMessageId = uuidv4();
const responseMessageId = uuidv4();
const userMessage = {
id: userMessageId,
parentId: parentMessage ? parentMessage.id : null,
childrenIds: [responseMessageId],
role: 'user',
content: userPrompt ? userPrompt : `[PROMPT] ${userMessageId}`,
timestamp: Math.floor(Date.now() / 1000)
};
const responseMessage = {
id: responseMessageId,
parentId: userMessageId,
childrenIds: [],
role: 'assistant',
content: `[RESPONSE] ${responseMessageId}`,
done: true,
model: modelId,
modelName: model.name ?? model.id,
modelIdx: 0,
timestamp: Math.floor(Date.now() / 1000)
};
if (parentMessage) {
parentMessage.childrenIds.push(userMessageId);
history.messages[parentMessage.id] = parentMessage;
}
history.messages[userMessageId] = userMessage;
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
await tick();
if (autoScroll) {
scrollToBottom();
}
if (messages.length === 0) {
await initChatHandler(history);
} else {
await saveChatHandler($chatId, history);
}
}
};
const addMessages = async ({ modelId, parentId, messages }) => {
const model = $models.filter((m) => m.id === modelId).at(0);
let parentMessage = history.messages[parentId];
let currentParentId = parentMessage ? parentMessage.id : null;
for (const message of messages) {
let messageId = uuidv4();
if (message.role === 'user') {
const userMessage = {
id: messageId,
parentId: currentParentId,
childrenIds: [],
timestamp: Math.floor(Date.now() / 1000),
...message
};
if (parentMessage) {
parentMessage.childrenIds.push(messageId);
history.messages[parentMessage.id] = parentMessage;
}
history.messages[messageId] = userMessage;
parentMessage = userMessage;
currentParentId = messageId;
} else {
const responseMessage = {
id: messageId,
parentId: currentParentId,
childrenIds: [],
done: true,
model: model.id,
modelName: model.name ?? model.id,
modelIdx: 0,
timestamp: Math.floor(Date.now() / 1000),
...message
};
if (parentMessage) {
parentMessage.childrenIds.push(messageId);
history.messages[parentMessage.id] = parentMessage;
}
history.messages[messageId] = responseMessage;
parentMessage = responseMessage;
currentParentId = messageId;
}
}
history.currentId = currentParentId;
await tick();
if (autoScroll) {
scrollToBottom();
}
if (messages.length === 0) {
await initChatHandler(history);
} else {
await saveChatHandler($chatId, history);
}
};
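// 'chat:completion' payloads handled below. Illustrative shape, inferred from the
// destructuring in chatCompletionEventHandler (streamed deltas follow the OpenAI format):
// {
// 	choices: [{ delta: { content: '...' } }], // or choices[0].message.content for non-stream responses
// 	done: false, content: '...', sources: [...], selected_model_id: '...', usage: { ... }, error: { ... }
// }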
const chatCompletionEventHandler = async (data, message, chatId) => {
const { id, done, choices, content, sources, selected_model_id, error, usage } = data;
if (error) {
await handleOpenAIError(error, message);
}
if (sources && !message?.sources) {
message.sources = sources;
}
if (choices) {
if (choices[0]?.message?.content) {
// Non-stream response
message.content += choices[0]?.message?.content;
} else {
// Stream response
let value = choices[0]?.delta?.content ?? '';
if (message.content == '' && value == '\n') {
console.log('Empty response');
} else {
message.content += value;
if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
navigator.vibrate(5);
}
// Emit chat event for TTS
const messageContentParts = getMessageContentParts(
removeAllDetails(message.content),
$config?.audio?.tts?.split_on ?? 'punctuation'
);
messageContentParts.pop();
// Dispatch only the last complete sentence and make sure it hasn't been dispatched before
if (
messageContentParts.length > 0 &&
messageContentParts[messageContentParts.length - 1] !== message.lastSentence
) {
message.lastSentence = messageContentParts[messageContentParts.length - 1];
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: {
id: message.id,
content: messageContentParts[messageContentParts.length - 1]
}
})
);
}
}
}
}
if (content) {
// REALTIME_CHAT_SAVE is disabled
message.content = content;
if (navigator.vibrate && ($settings?.hapticFeedback ?? false)) {
navigator.vibrate(5);
}
// Emit chat event for TTS
const messageContentParts = getMessageContentParts(
removeAllDetails(message.content),
$config?.audio?.tts?.split_on ?? 'punctuation'
);
messageContentParts.pop();
// Dispatch only the last complete sentence and make sure it hasn't been dispatched before
if (
messageContentParts.length > 0 &&
messageContentParts[messageContentParts.length - 1] !== message.lastSentence
) {
message.lastSentence = messageContentParts[messageContentParts.length - 1];
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: {
id: message.id,
content: messageContentParts[messageContentParts.length - 1]
}
})
);
}
}
if (selected_model_id) {
message.selectedModelId = selected_model_id;
message.arena = true;
}
if (usage) {
message.usage = usage;
}
history.messages[message.id] = message;
if (done) {
message.done = true;
if ($settings.responseAutoCopy) {
copyToClipboard(message.content);
}
if ($settings.responseAutoPlayback && !$showCallOverlay) {
await tick();
document.getElementById(`speak-button-${message.id}`)?.click();
}
// Emit chat event for TTS
let lastMessageContentPart =
getMessageContentParts(
removeAllDetails(message.content),
$config?.audio?.tts?.split_on ?? 'punctuation'
)?.at(-1) ?? '';
if (lastMessageContentPart) {
eventTarget.dispatchEvent(
new CustomEvent('chat', {
detail: { id: message.id, content: lastMessageContentPart }
})
);
}
eventTarget.dispatchEvent(
new CustomEvent('chat:finish', {
detail: {
id: message.id,
content: message.content
}
})
);
history.messages[message.id] = message;
await tick();
if (autoScroll) {
scrollToBottom();
}
await chatCompletedHandler(
chatId,
message.model,
message.id,
createMessagesList(history, message.id)
);
}
console.log(data);
await tick();
if (autoScroll) {
scrollToBottom();
}
};
//////////////////////////
// Chat functions
//////////////////////////
/**
* Submit a user message - the main entry point of the front-end chat flow.
*
* This is the function called when the user sends a message. It is responsible for:
* 1. Validating the user input (prompt, model selection, file state)
* 2. Creating the user message object and updating the local chat history
* 3. Handling file attachments (images, documents, etc.)
* 4. Calling sendMessage to issue the API request
*
* Flow:
* 1. Model validation
*    - Check that the selected models still exist
*    - Filter out deleted models to avoid failing requests
* 2. Input validation
*    - Require either text input or uploaded files
*    - Require at least one selected model
*    - Check upload status (non-image files must finish uploading first)
*    - Enforce the file-count limit (keeps the request size bounded)
* 3. Chat state checks
*    - Block submission while the previous response is still generating
*    - Block submission if the previous response errored without content
* 4. Clear the input box and reset the prompt variable
* 5. Handle file attachments
*    - Deep-copy the file list
*    - Add document-type files to the chat context (used for RAG retrieval)
*    - Deduplicate to avoid adding the same file twice
*    - Clear the current input file list
* 6. Create the user message object
*    - Generate a unique message ID (UUID)
*    - Build the message object: id, parentId, childrenIds, role, content, files, timestamp, models
* 7. Update the local chat history
*    - Add the user message to history.messages
*    - Set history.currentId to the new message ID
*    - Append to the parent message's childrenIds (the message tree supports branching)
* 8. UI work
*    - Re-focus the input box
*    - Persist the selected models to sessionStorage (restored after a page refresh)
* 9. Send the message to the backend
*    - Call sendMessage(history, userMessageId, { newChat: true })
*    - newChat: true means the chat record is created first for the first message of a new conversation
*
* @param userPrompt - The text entered by the user
* @param _raw - Whether to use the raw format (currently unused)
*/
const submitPrompt = async (userPrompt, { _raw = false } = {}) => {
console.log('submitPrompt', userPrompt, $chatId);
// === 1. Model validation: make sure the selected models still exist ===
// Filter out models that were deleted or are unavailable so the request does not fail
const _selectedModels = selectedModels.map((modelId) => {
const allIds = [...$models.map((m) => m.id), ...$userModels.map((m) => m.id)];
return allIds.includes(modelId) ? modelId : '';
});
// If the model list changed, sync the update
if (JSON.stringify(selectedModels) !== JSON.stringify(_selectedModels)) {
selectedModels = _selectedModels;
}
// === 2. Input validation ===
// 2.1 Require either a prompt or at least one uploaded file
if (userPrompt === '' && files.length === 0) {
toast.error($i18n.t('Please enter a prompt'));
return;
}
// 2.2 Make sure a model has been selected
if (selectedModels.includes('')) {
toast.error($i18n.t('Model not selected'));
return;
}
// 2.3 Check upload status (non-image files must finish uploading first)
// Image files can be sent immediately because they are embedded locally as base64
if (
files.length > 0 &&
files.filter((file) => file.type !== 'image' && file.status === 'uploading').length > 0
) {
toast.error(
$i18n.t(`Oops! There are files still uploading. Please wait for the upload to complete.`)
);
return;
}
// 2.4 Enforce the file-count limit (prevents oversized requests from too many attachments)
if (
($config?.file?.max_count ?? null) !== null &&
files.length + chatFiles.length > $config?.file?.max_count
) {
toast.error(
$i18n.t(`You can only chat with a maximum of {{maxCount}} file(s) at a time.`, {
maxCount: $config?.file?.max_count
})
);
return;
}
// === 3. Check the current chat state ===
if (history?.currentId) {
const lastMessage = history.messages[history.currentId];
// 3.1 If the previous message is still generating, block new submissions
if (lastMessage.done != true) {
// Response not done
return;
}
// 3.2 If the previous message errored without producing content, notify the user
if (lastMessage.error && !lastMessage.content) {
// Error in response
toast.error($i18n.t(`Oops! There was an error in the previous response.`));
return;
}
}
// === 4. Clear the input box ===
messageInput?.setText('');
prompt = '';
// === 5. Handle file attachments ===
const messages = createMessagesList(history, history.currentId);
const _files = JSON.parse(JSON.stringify(files)); // Deep-copy the file list
// 5.1 Add the document-type files from this message to the chat-context file list
// These files stay available for the whole conversation (used for RAG retrieval, etc.)
chatFiles.push(
..._files.filter((item) =>
['doc', 'text', 'file', 'note', 'chat', 'folder', 'collection'].includes(item.type)
)
);
// 5.2 Deduplicate: prevent the same file from being added to the context more than once
chatFiles = chatFiles.filter(
// Remove duplicates
(item, index, array) =>
array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
);
// 5.3 Clear the current input file list (already saved into _files and chatFiles)
files = [];
messageInput?.setText('');
// === 6. Create the user message object ===
let userMessageId = uuidv4(); // Generate a unique message ID
let userMessage = {
id: userMessageId,
parentId: messages.length !== 0 ? messages.at(-1).id : null, // Link to the parent (previous) message
childrenIds: [], // Initialize the list of child messages (used for conversation branching)
role: 'user',
content: userPrompt,
files: _files.length > 0 ? _files : undefined, // Attached files (images, documents, etc.)
timestamp: Math.floor(Date.now() / 1000), // Unix timestamp (seconds)
models: selectedModels // Record the models in use (for multi-model chats)
};
console.debug('[chat] send user message', {
chatId: $chatId,
messageId: userMessageId,
contentPreview: userPrompt.slice(0, 200),
files: _files?.map((f) => f.name ?? f.id) ?? []
});
// Lock the memory toggle: it can no longer be switched once the first user message is created
memoryLocked = true;
// === 7. Update the local chat history ===
// 7.1 Add the user message to the history
history.messages[userMessageId] = userMessage;
// 7.2 Set the current message ID (marks the current position in the conversation)
history.currentId = userMessageId;
// 7.3 Append to the parent message's child list (building the message tree)
// The tree structure supports branching (the user can return to an earlier message and regenerate)
if (messages.length !== 0) {
history.messages[messages.at(-1).id].childrenIds.push(userMessageId);
}
// === 8. UI work ===
// Re-focus the input box so the user can keep typing
const chatInput = document.getElementById('chat-input');
chatInput?.focus();
// Persist the currently selected models to sessionStorage (restored after a page refresh)
saveSessionSelectedModels();
// === 9. Send the message to the backend ===
// newChat: true means the chat record is created first if this is the first message of a new conversation
await sendMessage(history, userMessageId, { newChat: true });
};
/**
* Send a message to the backend - create response messages and call the API.
*
* This is the core send function. It is responsible for:
* 1. Creating an empty response-message placeholder for every selected model
* 2. Creating the chat record first if this is the first message of a new conversation
* 3. Sending requests to all selected models concurrently (multi-model chat)
* 4. Refreshing the chat list
*
* Flow:
* 1. UI auto-scroll
*    - Scroll to the bottom if auto-scroll is enabled
* 2. Deep-copy data
*    - Deep-copy chatId and history to avoid state pollution
* 3. Resolve the model list
*    - Priority: explicit modelId > atSelectedModel (selected via @) > selectedModels (global selection)
* 4. Create response-message placeholders
*    - Create an empty response message object for every selected model
*    - content starts empty and is filled via the WebSocket stream
*    - Add each response message to history.messages
*    - Append to the parent message's childrenIds (building the message tree)
*    - Record each responseMessageId under the key `modelId-modelIdx`
* 5. Create the chat record
*    - If this is the first message of a new chat (newChat=true and parentId=null),
*      call initChatHandler to create the chat record and obtain a chatId
* 6. Save the chat history
*    - Call saveChatHandler to persist the message tree to the database
* 7. Send the requests concurrently
*    - Use Promise.all to query all selected models in parallel
*    - For each model:
*      7.1 Check vision capability (if the messages contain images)
*      7.2 Look up the response message ID
*      7.3 Start the chat event emitter (periodic heartbeat used for usage tracking)
*      7.4 Call sendMessageSocket to issue the API request
*      7.5 Clear the event emitter
* 8. Refresh the sidebar chat list
*
* @param _history - The chat history object (message tree)
* @param parentId - The parent message ID (the user message ID)
* @param messages - Optional custom message list (for regeneration and similar cases)
* @param modelId - Optional explicit model ID (single-model regeneration)
* @param modelIdx - Optional model index (targets a specific model in a multi-model chat)
* @param newChat - Whether this is the first message of a new conversation
*/
const sendMessage = async (
_history,
parentId: string,
{
messages = null,
modelId = null,
modelIdx = null,
newChat = false
}: {
messages?: any[] | null;
modelId?: string | null;
modelIdx?: number | null;
newChat?: boolean;
} = {}
) => {
// === 1. Auto-scroll the UI ===
if (autoScroll) {
scrollToBottom();
}
// === 2. Deep-copy data to avoid state pollution ===
let _chatId = JSON.parse(JSON.stringify($chatId));
_history = JSON.parse(JSON.stringify(_history));
// === 3. Resolve the list of models to use ===
const responseMessageIds: Record<PropertyKey, string> = {};
// Priority: explicit modelId > atSelectedModel (selected via @) > selectedModels (global selection)
let selectedModelIds = modelId
? [modelId]
: atSelectedModel !== undefined
? [atSelectedModel.id]
: selectedModels;
// === 4. Create a response-message placeholder for every selected model ===
// This lets the UI show a "typing..." state immediately
for (const [_modelIdx, modelId] of selectedModelIds.entries()) {
const combined = getCombinedModelById(modelId);
if (combined) {
const model = combined.model ?? combined.credential;
// 4.1 Generate the response message ID and an empty message object
let responseMessageId = uuidv4();
let responseMessage = {
parentId: parentId,
id: responseMessageId,
childrenIds: [],
role: 'assistant',
content: '', // Starts empty; filled incrementally via the WebSocket stream
model:
combined.source === 'user' && combined.credential
? combined.credential.model_id
: model.id,
modelName:
combined.source === 'user' && combined.credential
? combined.credential.name ?? combined.credential.model_id
: model.name ?? model.id,
modelIdx: modelIdx ? modelIdx : _modelIdx, // Distinguishes responses from different models in a multi-model chat
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
// 4.2 Add the response message to the history
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
// 4.3 Append to the parent (user) message's child list
// Message tree: user message -> [assistant message 1, assistant message 2, ...]
if (parentId !== null && history.messages[parentId]) {
// Add null check before accessing childrenIds
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
responseMessageId
];
}
// 4.4 Record the response message ID for later lookup
// Key format: modelId-modelIdx, e.g. "gpt-4-0"
responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`] = responseMessageId;
}
}
history = history;
// === 5. If this is the first message of a new conversation, create the chat record first ===
// Condition: newChat=true and the current message has no parent (i.e. it is the first user message)
if (newChat && _history.messages[_history.currentId].parentId === null) {
_chatId = await initChatHandler(_history);
}
await tick();
// === 6. Save the chat history to the database ===
_history = JSON.parse(JSON.stringify(history));
// Save chat after all messages have been created
await saveChatHandler(_chatId, _history);
// === 7. Send requests to all selected models concurrently ===
// Promise.all runs the requests in parallel, improving multi-model chat performance
await Promise.all(
selectedModelIds.map(async (modelId, _modelIdx) => {
console.log('modelId', modelId);
const combined = getCombinedModelById(modelId);
const model = combined?.model ?? combined?.credential;
if (combined && model) {
// 7.1 Check vision capability (if the messages contain images)
const hasImages = createMessagesList(_history, parentId).some((message) =>
message.files?.some((file) => file.type === 'image')
);
// If the messages contain images but the model is not vision capable, show an error (user-owned models are assumed capable)
if (
combined.source !== 'user' &&
hasImages &&
!(model.info?.meta?.capabilities?.vision ?? true)
) {
toast.error(
$i18n.t('Model {{modelName}} is not vision capable', {
modelName: model.name ?? model.id
})
);
}
// 7.2 Look up the response message ID
let responseMessageId =
responseMessageIds[`${modelId}-${modelIdx ? modelIdx : _modelIdx}`];
// 7.3 Start the chat event emitter (periodic heartbeat to the backend for usage tracking)
const chatEventEmitter = await getChatEventEmitter(model.id, _chatId);
scrollToBottom();
// 7.4 Issue the API request (the core call)
// sendMessageSocket is responsible for:
// - Building the request payload (messages, files, tools, features, etc.)
// - Calling the generateOpenAIChatCompletion API
// - Handling the streamed response (message content is updated in real time over the WebSocket)
await sendMessageSocket(
combined,
messages && messages.length > 0
? messages // Use the custom message list (e.g. a follow-up appended during regeneration)
: createMessagesList(_history, responseMessageId), // Use the full history
_history,
responseMessageId,
_chatId
);
// 7.5 Clear the event emitter
if (chatEventEmitter) clearInterval(chatEventEmitter);
} else {
toast.error($i18n.t(`Model {{modelId}} not found`, { modelId }));
}
})
);
// === 8. Refresh the chat list (sidebar) ===
currentChatPage.set(1);
chats.set(await getChatList(localStorage.token, $currentChatPage));
};
const getFeatures = () => {
let features = {};
if ($config?.features)
features = {
image_generation:
$config?.features?.enable_image_generation &&
($user?.role === 'admin' || $user?.permissions?.features?.image_generation)
? imageGenerationEnabled
: false,
code_interpreter:
$config?.features?.enable_code_interpreter &&
($user?.role === 'admin' || $user?.permissions?.features?.code_interpreter)
? codeInterpreterEnabled
: false,
web_search:
$config?.features?.enable_web_search &&
($user?.role === 'admin' || $user?.permissions?.features?.web_search)
? webSearchEnabled
: false
};
const currentModels = atSelectedModel?.id ? [atSelectedModel.id] : selectedModels;
if (
currentModels.filter(
(model) => $models.find((m) => m.id === model)?.info?.meta?.capabilities?.web_search ?? true
).length === currentModels.length
) {
if ($config?.features?.enable_web_search && ($settings?.webSearch ?? false) === 'always') {
features = { ...features, web_search: true };
}
}
if ($settings?.memory ?? false) {
features = { ...features, memory: true };
}
// If the user manually toggled the memory switch, override the global setting
if (memoryEnabled !== undefined && memoryEnabled !== ($settings?.memory ?? false)) {
features = { ...features, memory: memoryEnabled };
}
return features;
};
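// Illustrative return value of getFeatures() (the exact keys depend on config, user
// permissions and the per-chat toggles above):
// { image_generation: false, code_interpreter: false, web_search: true, memory: true }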
const getCombinedModelById = (modelId) => {
const platform = $models.find((m) => m.id === modelId);
if (platform) return { source: 'platform', model: platform };
const priv = $userModels.find((m) => m.id === modelId);
if (priv) return { source: 'user', credential: priv };
return null;
};
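// getCombinedModelById returns one of:
// { source: 'platform', model }      - a workspace/base model from $models
// { source: 'user', credential }     - a user-owned model from $userModels
// null                               - when the id is unknown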
const sendMessageSocket = async (combinedModel, _messages, _history, responseMessageId, _chatId) => {
const responseMessage = _history.messages[responseMessageId];
const userMessage = _history.messages[responseMessage.parentId];
const model = combinedModel?.model ?? combinedModel?.credential ?? combinedModel;
const chatMessageFiles = _messages
.filter((message) => message.files)
.flatMap((message) => message.files);
// Filter chatFiles to only include files that are in the chatMessageFiles
chatFiles = chatFiles.filter((item) => {
const fileExists = chatMessageFiles.some((messageFile) => messageFile.id === item.id);
return fileExists;
});
let files = JSON.parse(JSON.stringify(chatFiles));
files.push(
...(userMessage?.files ?? []).filter((item) =>
['doc', 'text', 'file', 'note', 'chat', 'collection'].includes(item.type)
)
);
// Remove duplicates
files = files.filter(
(item, index, array) =>
array.findIndex((i) => JSON.stringify(i) === JSON.stringify(item)) === index
);
scrollToBottom();
eventTarget.dispatchEvent(
new CustomEvent('chat:start', {
detail: {
id: responseMessageId
}
})
);
await tick();
let userLocation;
if ($settings?.userLocation) {
userLocation = await getAndUpdateUserLocation(localStorage.token).catch((err) => {
console.error(err);
return undefined;
});
}
const isUserModel = combinedModel?.source === 'user';
const credential = combinedModel?.credential;
const stream = true;
let messages = [
params?.system || $settings.system
? {
role: 'system',
content: `${params?.system ?? $settings?.system ?? ''}`
}
: undefined,
..._messages.map((message) => ({
...message,
content: processDetails(message.content)
}))
].filter((message) => message);
messages = messages
.map((message, idx, arr) => ({
role: message.role,
...((message.files?.filter((file) => file.type === 'image').length > 0 ?? false) &&
message.role === 'user'
? {
content: [
{
type: 'text',
text: message?.merged?.content ?? message.content
},
...message.files
.filter((file) => file.type === 'image')
.map((file) => ({
type: 'image_url',
image_url: {
url: file.url
}
}))
]
}
: {
content: message?.merged?.content ?? message.content
})
}))
.filter((message) => message?.role === 'user' || message?.content?.trim());
const toolIds = [];
const toolServerIds = [];
for (const toolId of selectedToolIds) {
if (toolId.startsWith('direct_server:')) {
let serverId = toolId.replace('direct_server:', '');
// Check if serverId is a number
if (!isNaN(parseInt(serverId))) {
toolServerIds.push(parseInt(serverId));
} else {
toolServerIds.push(serverId);
}
} else {
toolIds.push(toolId);
}
}
const res = await generateOpenAIChatCompletion(
localStorage.token,
{
stream: stream,
model: isUserModel ? credential.model_id : model.id,
messages: messages,
params: {
...$settings?.params,
...params,
stop:
(params?.stop ?? $settings?.params?.stop ?? undefined)
? (params?.stop.split(',').map((token) => token.trim()) ?? $settings.params.stop).map(
(str) => decodeURIComponent(JSON.parse('"' + str.replace(/\"/g, '\\"') + '"'))
)
: undefined
},
files: (files?.length ?? 0) > 0 ? files : undefined,
filter_ids: selectedFilterIds.length > 0 ? selectedFilterIds : undefined,
tool_ids: toolIds.length > 0 ? toolIds : undefined,
tool_servers: ($toolServers ?? []).filter(
(server, idx) => toolServerIds.includes(idx) || toolServerIds.includes(server?.id)
),
features: getFeatures(),
variables: {
...getPromptVariables($user?.name, $settings?.userLocation ? userLocation : undefined)
},
model_item: isUserModel ? { credential_id: credential.id } : $models.find((m) => m.id === model.id),
is_user_model: isUserModel,
session_id: $socket?.id,
chat_id: $chatId,
id: responseMessageId,
background_tasks: {
...(!$temporaryChatEnabled &&
(messages.length == 1 ||
(messages.length == 2 &&
messages.at(0)?.role === 'system' &&
messages.at(1)?.role === 'user')) &&
(selectedModels[0] === model.id || atSelectedModel !== undefined)
? {
title_generation: $settings?.title?.auto ?? true,
tags_generation: $settings?.autoTags ?? true
}
: {}),
follow_up_generation: $settings?.autoFollowUps ?? true
},
...(stream && (model.info?.meta?.capabilities?.usage ?? false)
? {
stream_options: {
include_usage: true
}
}
: {})
},
`${WEBUI_BASE_URL}/api`
).catch(async (error) => {
console.log(error);
let errorMessage = error;
if (error?.error?.message) {
errorMessage = error.error.message;
} else if (error?.message) {
errorMessage = error.message;
}
if (typeof errorMessage === 'object') {
errorMessage = $i18n.t(`Uh-oh! There was an issue with the response.`);
}
toast.error(`${errorMessage}`);
responseMessage.error = {
content: error
};
responseMessage.done = true;
history.messages[responseMessageId] = responseMessage;
history.currentId = responseMessageId;
return null;
});
if (res) {
if (res.error) {
await handleOpenAIError(res.error, responseMessage);
} else {
if (taskIds) {
taskIds.push(res.task_id);
} else {
taskIds = [res.task_id];
}
}
}
await tick();
scrollToBottom();
};
const handleOpenAIError = async (error, responseMessage) => {
let errorMessage = '';
let innerError;
if (error) {
innerError = error;
}
console.error(innerError);
if ('detail' in innerError) {
// FastAPI error
toast.error(innerError.detail);
errorMessage = innerError.detail;
} else if ('error' in innerError) {
// OpenAI error
if ('message' in innerError.error) {
toast.error(innerError.error.message);
errorMessage = innerError.error.message;
} else {
toast.error(innerError.error);
errorMessage = innerError.error;
}
} else if ('message' in innerError) {
// OpenAI error
toast.error(innerError.message);
errorMessage = innerError.message;
}
responseMessage.error = {
content: $i18n.t(`Uh-oh! There was an issue with the response.`) + '\n' + errorMessage
};
responseMessage.done = true;
if (responseMessage.statusHistory) {
responseMessage.statusHistory = responseMessage.statusHistory.filter(
(status) => status.action !== 'knowledge_search'
);
}
history.messages[responseMessage.id] = responseMessage;
};
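// handleOpenAIError accepts the error shapes produced upstream, e.g. (illustrative):
// { detail: '...' }              - FastAPI error
// { error: { message: '...' } }  - OpenAI-style error
// { message: '...' }             - generic Error-like object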
const stopResponse = async () => {
if (taskIds) {
for (const taskId of taskIds) {
const res = await stopTask(localStorage.token, taskId).catch((error) => {
toast.error(`${error}`);
return null;
});
}
taskIds = null;
const responseMessage = history.messages[history.currentId];
// Set all response messages to done
for (const messageId of history.messages[responseMessage.parentId].childrenIds) {
history.messages[messageId].done = true;
}
history.messages[history.currentId] = responseMessage;
if (autoScroll) {
scrollToBottom();
}
}
if (generating) {
generating = false;
generationController?.abort();
generationController = null;
}
};
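// Note: stopResponse covers both generation paths: backend tasks are cancelled via
// stopTask for each pending taskId, and a local MoA merge stream (mergeResponses) is
// aborted through generationController.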
const submitMessage = async (parentId, prompt) => {
let userPrompt = prompt;
let userMessageId = uuidv4();
let userMessage = {
id: userMessageId,
parentId: parentId,
childrenIds: [],
role: 'user',
content: userPrompt,
models: selectedModels,
timestamp: Math.floor(Date.now() / 1000) // Unix epoch
};
if (parentId !== null) {
history.messages[parentId].childrenIds = [
...history.messages[parentId].childrenIds,
userMessageId
];
}
history.messages[userMessageId] = userMessage;
history.currentId = userMessageId;
await tick();
if (autoScroll) {
scrollToBottom();
}
await sendMessage(history, userMessageId);
};
const regenerateResponse = async (message, suggestionPrompt = null) => {
console.log('regenerateResponse');
if (history.currentId) {
let userMessage = history.messages[message.parentId];
if (autoScroll) {
scrollToBottom();
}
await sendMessage(history, userMessage.id, {
...(suggestionPrompt
? {
messages: [
...createMessagesList(history, message.id),
{
role: 'user',
content: suggestionPrompt
}
]
}
: {}),
...((userMessage?.models ?? [...selectedModels]).length > 1
? {
// If multiple models are selected, use the model from the message
modelId: message.model,
modelIdx: message.modelIdx
}
: {})
});
}
};
const continueResponse = async () => {
console.log('continueResponse');
const _chatId = JSON.parse(JSON.stringify($chatId));
if (history.currentId && history.messages[history.currentId].done == true) {
const responseMessage = history.messages[history.currentId];
responseMessage.done = false;
await tick();
const model = $models
.filter((m) => m.id === (responseMessage?.selectedModelId ?? responseMessage.model))
.at(0);
if (model) {
await sendMessageSocket(
model,
createMessagesList(history, responseMessage.id),
history,
responseMessage.id,
_chatId
);
}
}
};
const mergeResponses = async (messageId, responses, _chatId) => {
console.log('mergeResponses', messageId, responses);
const message = history.messages[messageId];
const mergedResponse = {
status: true,
content: ''
};
message.merged = mergedResponse;
history.messages[messageId] = message;
try {
generating = true;
const [res, controller] = await generateMoACompletion(
localStorage.token,
message.model,
history.messages[message.parentId].content,
responses
);
if (res && res.ok && res.body && generating) {
generationController = controller;
const textStream = await createOpenAITextStream(res.body, $settings.splitLargeChunks);
for await (const update of textStream) {
const { value, done, sources, error, usage } = update;
if (error || done) {
generating = false;
generationController = null;
break;
}
if (mergedResponse.content == '' && value == '\n') {
continue;
} else {
mergedResponse.content += value;
history.messages[messageId] = message;
}
if (autoScroll) {
scrollToBottom();
}
}
await saveChatHandler(_chatId, history);
} else {
console.error(res);
}
} catch (e) {
console.error(e);
}
};
const initChatHandler = async (history) => {
let _chatId = $chatId;
if (!$temporaryChatEnabled) {
chat = await createNewChat(
localStorage.token,
{
id: _chatId,
title: $i18n.t('New Chat'),
models: selectedModels,
system: $settings.system ?? undefined,
params: params,
history: history,
messages: createMessagesList(history, history.currentId),
memory_enabled: memoryEnabled,
tags: [],
timestamp: Date.now()
},
$selectedFolder?.id
);
_chatId = chat.id;
await chatId.set(_chatId);
window.history.replaceState(history.state, '', `/c/${_chatId}`);
await tick();
await chats.set(await getChatList(localStorage.token, $currentChatPage));
currentChatPage.set(1);
selectedFolder.set(null);
} else {
_chatId = `local:${$socket?.id}`; // Use socket id for temporary chat
await chatId.set(_chatId);
}
await tick();
return _chatId;
};
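// Note: temporary chats are never persisted; they get a local id of the form
// `local:<socket id>` so events can still be routed, while regular chats are created
// server-side via createNewChat and the URL is rewritten to /c/<chatId>.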
const saveChatHandler = async (_chatId, history) => {
if ($chatId == _chatId) {
if (!$temporaryChatEnabled) {
chat = await updateChatById(localStorage.token, _chatId, {
models: selectedModels,
history: history,
messages: createMessagesList(history, history.currentId),
params: params,
files: chatFiles,
memory_enabled: memoryEnabled
});
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
}
}
};
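// Draft persistence: the in-progress prompt is debounced (500 ms) into sessionStorage,
// keyed per chat; drafts at or above MAX_DRAFT_LENGTH characters are discarded instead.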
const MAX_DRAFT_LENGTH = 5000;
let saveDraftTimeout = null;
const saveDraft = async (draft, chatId = null) => {
if (saveDraftTimeout) {
clearTimeout(saveDraftTimeout);
}
if (draft.prompt != null && draft.prompt.length < MAX_DRAFT_LENGTH) {
saveDraftTimeout = setTimeout(async () => {
sessionStorage.setItem(
`chat-input${chatId ? `-${chatId}` : ''}`,
JSON.stringify(draft)
);
}, 500);
} else {
sessionStorage.removeItem(`chat-input${chatId ? `-${chatId}` : ''}`);
}
};
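// Cancel any pending debounced write and remove the stored draft for this chat.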
const clearDraft = async (chatId = null) => {
if (saveDraftTimeout) {
clearTimeout(saveDraftTimeout);
}
sessionStorage.removeItem(`chat-input${chatId ? `-${chatId}` : ''}`);
};
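// Move a chat into a folder, then refresh the chat list and pinned chats.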
const moveChatHandler = async (chatId, folderId) => {
if (chatId && folderId) {
const res = await updateChatFolderIdById(localStorage.token, chatId, folderId).catch(
(error) => {
toast.error(`${error}`);
return null;
}
);
if (res) {
currentChatPage.set(1);
await chats.set(await getChatList(localStorage.token, $currentChatPage));
await pinnedChats.set(await getPinnedChatList(localStorage.token));
toast.success($i18n.t('Chat moved successfully'));
}
} else {
toast.error($i18n.t('Failed to move chat'));
}
};
</script>
<svelte:head>
<title>
{$settings.showChatTitleInTab !== false && $chatTitle
? `${$chatTitle.length > 30 ? `${$chatTitle.slice(0, 30)}...` : $chatTitle} ${$WEBUI_NAME}`
: `${$WEBUI_NAME}`}
</title>
</svelte:head>
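<!-- Hidden audio element, looked up by id elsewhere for audio playback -->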
<audio id="audioElement" src="" style="display: none;" />
<EventConfirmDialog
bind:show={showEventConfirmation}
title={eventConfirmationTitle}
message={eventConfirmationMessage}
input={eventConfirmationInput}
inputPlaceholder={eventConfirmationInputPlaceholder}
inputValue={eventConfirmationInputValue}
on:confirm={(e) => {
if (e.detail) {
eventCallback(e.detail);
} else {
eventCallback(true);
}
}}
on:cancel={() => {
eventCallback(false);
}}
/>
<div
class="h-screen max-h-[100dvh] transition-width duration-200 ease-in-out {$showSidebar
? ' md:max-w-[calc(100%-260px)]'
: ' '} w-full max-w-full flex flex-col"
id="chat-container"
>
{#if !loading}
<div in:fade={{ duration: 50 }} class="w-full h-full flex flex-col">
{#if $selectedFolder && $selectedFolder?.meta?.background_image_url}
<div
class="absolute {$showSidebar
? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
style="background-image: url({$selectedFolder?.meta?.background_image_url}) "
/>
<div
class="absolute top-0 left-0 w-full h-full bg-linear-to-t from-white to-white/85 dark:from-gray-900 dark:to-gray-900/90 z-0"
/>
{:else if $settings?.backgroundImageUrl ?? $config?.license_metadata?.background_image_url ?? null}
<div
class="absolute {$showSidebar
? 'md:max-w-[calc(100%-260px)] md:translate-x-[260px]'
: ''} top-0 left-0 w-full h-full bg-cover bg-center bg-no-repeat"
style="background-image: url({$settings?.backgroundImageUrl ??
$config?.license_metadata?.background_image_url}) "
/>
<div
class="absolute top-0 left-0 w-full h-full bg-linear-to-t from-white to-white/85 dark:from-gray-900 dark:to-gray-900/90 z-0"
/>
{/if}
<PaneGroup direction="horizontal" class="w-full h-full">
<Pane defaultSize={50} minSize={30} class="h-full flex relative max-w-full flex-col">
<Navbar
bind:this={navbarElement}
chat={{
id: $chatId,
chat: {
title: $chatTitle,
models: selectedModels,
system: $settings.system ?? undefined,
params: params,
history: history,
timestamp: Date.now()
}
}}
{history}
title={$chatTitle}
bind:selectedModels
shareEnabled={!!history.currentId}
{initNewChat}
archiveChatHandler={() => {}}
{moveChatHandler}
onSaveTempChat={async () => {
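// Persist the current temporary chat as a regular chat, then navigate to it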
try {
if (!history?.currentId || !Object.keys(history.messages).length) {
toast.error($i18n.t('No conversation to save'));
return;
}
const messages = createMessagesList(history, history.currentId);
const title =
messages.find((m) => m.role === 'user')?.content ?? $i18n.t('New Chat');
const savedChat = await createNewChat(
localStorage.token,
{
id: uuidv4(),
title: title.length > 50 ? `${title.slice(0, 50)}...` : title,
models: selectedModels,
history: history,
messages: messages,
timestamp: Date.now()
},
null
);
if (savedChat) {
temporaryChatEnabled.set(false);
chatId.set(savedChat.id);
chats.set(await getChatList(localStorage.token, $currentChatPage));
await goto(`/c/${savedChat.id}`);
toast.success($i18n.t('Conversation saved successfully'));
}
} catch (error) {
console.error('Error saving conversation:', error);
toast.error($i18n.t('Failed to save conversation'));
}
}}
/>
<div class="flex flex-col flex-auto z-10 w-full @container overflow-auto">
{#if ($settings?.landingPageMode === 'chat' && !$selectedFolder) || createMessagesList(history, history.currentId).length > 0}
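<!-- Messages pane; the scroll handler keeps auto-scroll enabled only while the user is within ~5px of the bottom -->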
<div
class=" pb-2.5 flex flex-col justify-between w-full flex-auto overflow-auto h-0 max-w-full z-10 scrollbar-hidden"
id="messages-container"
bind:this={messagesContainerElement}
on:scroll={(e) => {
autoScroll =
messagesContainerElement.scrollHeight - messagesContainerElement.scrollTop <=
messagesContainerElement.clientHeight + 5;
}}
>
<div class=" h-full w-full flex flex-col">
<Messages
chatId={$chatId}
bind:history
bind:autoScroll
bind:prompt
setInputText={(text) => {
messageInput?.setText(text);
}}
{selectedModels}
{atSelectedModel}
{sendMessage}
{showMessage}
{submitMessage}
{continueResponse}
{regenerateResponse}
{mergeResponses}
{chatActionHandler}
{addMessages}
topPadding={true}
bottomPadding={files.length > 0}
{onSelect}
/>
</div>
</div>
<div class=" pb-2">
<MessageInput
bind:this={messageInput}
{history}
{taskIds}
{selectedModels}
bind:files
bind:prompt
bind:autoScroll
bind:selectedToolIds
bind:selectedFilterIds
bind:imageGenerationEnabled
bind:codeInterpreterEnabled
bind:webSearchEnabled
bind:memoryEnabled
{memoryLocked}
bind:atSelectedModel
bind:showCommands
toolServers={$toolServers}
{generating}
{stopResponse}
{createMessagePair}
onChange={(data) => {
if (!$temporaryChatEnabled) {
saveDraft(data, $chatId);
}
}}
on:upload={async (e) => {
const { type, data } = e.detail;
if (type === 'web') {
await uploadWeb(data);
} else if (type === 'youtube') {
await uploadYoutubeTranscription(data);
} else if (type === 'google-drive') {
await uploadGoogleDriveFile(data);
}
}}
on:submit={async (e) => {
clearDraft($chatId);
if (e.detail || files.length > 0) {
await tick();
submitPrompt(e.detail.replaceAll('\n\n', '\n'));
}
}}
/>
<div
class="absolute bottom-1 text-xs text-gray-500 text-center line-clamp-1 right-0 left-0"
>
<!-- {$i18n.t('LLMs can make mistakes. Verify important information.')} -->
</div>
</div>
{:else}
<div class="flex items-center h-full">
<Placeholder
{history}
{selectedModels}
bind:messageInput
bind:files
bind:prompt
bind:autoScroll
bind:selectedToolIds
bind:selectedFilterIds
bind:imageGenerationEnabled
bind:codeInterpreterEnabled
bind:webSearchEnabled
bind:memoryEnabled
{memoryLocked}
bind:atSelectedModel
bind:showCommands
toolServers={$toolServers}
{stopResponse}
{createMessagePair}
{onSelect}
onChange={(data) => {
if (!$temporaryChatEnabled) {
saveDraft(data);
}
}}
on:upload={async (e) => {
const { type, data } = e.detail;
if (type === 'web') {
await uploadWeb(data);
} else if (type === 'youtube') {
await uploadYoutubeTranscription(data);
}
}}
on:submit={async (e) => {
clearDraft();
if (e.detail || files.length > 0) {
await tick();
submitPrompt(e.detail.replaceAll('\n\n', '\n'));
}
}}
/>
</div>
{/if}
</div>
</Pane>
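<!-- Secondary pane with chat controls (bound to history, params, and files) -->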
<ChatControls
bind:this={controlPaneComponent}
bind:history
bind:chatFiles
bind:params
bind:files
bind:pane={controlPane}
chatId={$chatId}
modelId={selectedModelIds?.at(0) ?? null}
models={selectedModelIds
.map((id) => $models.find((m) => m.id === id))
.filter((model) => model)}
{submitPrompt}
{stopResponse}
{showMessage}
{eventTarget}
/>
</PaneGroup>
</div>
{:else}
<div class=" flex items-center justify-center h-full w-full">
<div class="m-auto">
<Spinner className="size-5" />
</div>
</div>
{/if}
</div>