mirror of
https://github.com/open-webui/open-webui.git
synced 2025-12-13 21:05:19 +00:00
1. 添加 summary 接口 2. src/lib/components/chat/Chat.svelte 中强制 stream 传输 3. src/lib/components/layout/Sidebar.svelte initChatList()的调用补充await
This commit is contained in:
parent
271af2b73d
commit
f112cd3ced
9 changed files with 1220 additions and 419 deletions
|
|
@ -621,6 +621,15 @@ else:
|
||||||
except Exception:
|
except Exception:
|
||||||
CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES = 30
|
CHAT_RESPONSE_MAX_TOOL_CALL_RETRIES = 30
|
||||||
|
|
||||||
|
# Global debug switch (defaults to on).
# FIX(review): the environment variable was read as "CHAT_DEBUG_FALG" — a typo
# of "CHAT_DEBUG_FLAG". Read the correctly-spelled name first, but keep the
# typo'd name as a fallback so existing deployments are not broken.
CHAT_DEBUG_FLAG = (
    os.environ.get("CHAT_DEBUG_FLAG", os.environ.get("CHAT_DEBUG_FALG", "True"))
    .lower()
    == "true"
)
|
||||||
|
|
||||||
|
# Default thresholds for summary/chat behavior.
# SUMMARY_TOKEN_THRESHOLD controls roughly how many tokens a conversation may
# accumulate before it is summarized.
SUMMARY_TOKEN_THRESHOLD_DEFAULT = os.environ.get("SUMMARY_TOKEN_THRESHOLD", "3000")
try:
    SUMMARY_TOKEN_THRESHOLD_DEFAULT = int(SUMMARY_TOKEN_THRESHOLD_DEFAULT)
except (TypeError, ValueError):
    # FIX(review): narrowed from a bare `except Exception` — only a
    # non-numeric env value should fall back to the default.
    SUMMARY_TOKEN_THRESHOLD_DEFAULT = 3000
|
||||||
|
|
||||||
####################################
|
####################################
|
||||||
# WEBSOCKET SUPPORT
|
# WEBSOCKET SUPPORT
|
||||||
|
|
|
||||||
|
|
@ -465,6 +465,8 @@ from open_webui.env import (
|
||||||
EXTERNAL_PWA_MANIFEST_URL,
|
EXTERNAL_PWA_MANIFEST_URL,
|
||||||
AIOHTTP_CLIENT_SESSION_SSL,
|
AIOHTTP_CLIENT_SESSION_SSL,
|
||||||
ENABLE_STAR_SESSIONS_MIDDLEWARE,
|
ENABLE_STAR_SESSIONS_MIDDLEWARE,
|
||||||
|
|
||||||
|
CHAT_DEBUG_FLAG,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|
@ -481,6 +483,12 @@ from open_webui.utils.chat import (
|
||||||
chat_action as chat_action_handler,
|
chat_action as chat_action_handler,
|
||||||
)
|
)
|
||||||
from open_webui.utils.misc import get_message_list
|
from open_webui.utils.misc import get_message_list
|
||||||
|
from open_webui.utils.summary import (
|
||||||
|
summarize,
|
||||||
|
compute_token_count,
|
||||||
|
build_ordered_messages,
|
||||||
|
get_recent_messages_by_user_id,
|
||||||
|
)
|
||||||
from open_webui.utils.embeddings import generate_embeddings
|
from open_webui.utils.embeddings import generate_embeddings
|
||||||
from open_webui.utils.middleware import process_chat_payload, process_chat_response
|
from open_webui.utils.middleware import process_chat_payload, process_chat_response
|
||||||
from open_webui.utils.access_control import has_access
|
from open_webui.utils.access_control import has_access
|
||||||
|
|
@ -1619,7 +1627,69 @@ async def chat_completion(
|
||||||
# === 8. 定义内部处理函数 process_chat ===
|
# === 8. 定义内部处理函数 process_chat ===
|
||||||
async def process_chat(request, form_data, user, metadata, model):
|
async def process_chat(request, form_data, user, metadata, model):
|
||||||
"""处理完整的聊天流程:Payload 处理 → LLM 调用 → 响应处理"""
|
"""处理完整的聊天流程:Payload 处理 → LLM 调用 → 响应处理"""
|
||||||
|
|
||||||
|
async def ensure_initial_summary():
    """
    For a new chat that has no stored summary, gather the user's recent
    interactions, generate one summary and persist it.

    Trigger condition: non-local session with no existing summary.

    NOTE(review): reads `metadata` and `user` from the enclosing
    `process_chat` scope; best-effort — any failure is logged and ignored.
    """
    # Resolve chat_id; local (ephemeral) sessions are skipped.
    chat_id = metadata.get("chat_id")
    if not chat_id or str(chat_id).startswith("local:"):
        return

    try:
        # Skip generation when a summary already exists for this chat.
        old_summary = Chats.get_summary_by_user_id_and_chat_id(user.id, chat_id)
        if CHAT_DEBUG_FLAG:
            print(f"[summary:init] chat_id={chat_id} 现有摘要={bool(old_summary)}")
        if old_summary:
            if CHAT_DEBUG_FLAG:
                print(f"[summary:init] chat_id={chat_id} 已存在摘要,跳过生成")
            return

        # Collect the user's recent messages (up to 100).
        ordered = get_recent_messages_by_user_id(user.id, chat_id, 100)
        if CHAT_DEBUG_FLAG:
            print(f"[summary:init] chat_id={chat_id} 最近消息数={len(ordered)} (优先当前会话)")

        if not ordered:
            if CHAT_DEBUG_FLAG:
                print(f"[summary:init] chat_id={chat_id} 无可用消息,跳过生成")
            return

        # Generate the summary and record the boundary / cold-start ids.
        summary_text = summarize(ordered, None)
        last_id = ordered[-1].get("id") if ordered else None
        # Remember the last 20 message ids as cold-start messages.
        recent_ids = [m.get("id") for m in ordered[-20:] if m.get("id")]

        if CHAT_DEBUG_FLAG:
            print(
                f"[summary:init] chat_id={chat_id} 生成首条摘要,msg_count={len(ordered)}, last_id={last_id}, recent_ids={len(recent_ids)}"
            )
            # FIX(review): this content dump previously ran unconditionally,
            # leaking message content to stdout even with debugging disabled.
            print("[summary:init]: ordered")
            for i in ordered:
                print(i['role'], " ", i['content'][:100])

        res = Chats.set_summary_by_user_id_and_chat_id(
            user.id,
            chat_id,
            summary_text,
            last_id,
            int(time.time()),
            recent_message_ids=recent_ids,
        )
        if not res:
            if CHAT_DEBUG_FLAG:
                print(f"[summary:init] chat_id={chat_id} 写入摘要失败")
    except Exception as e:
        # Summary generation must never break the chat flow.
        log.exception(f"initial summary failed: {e}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
await ensure_initial_summary()
|
||||||
|
|
||||||
# 8.1 Payload 预处理:执行 Pipeline Filters、工具注入、RAG 检索等
|
# 8.1 Payload 预处理:执行 Pipeline Filters、工具注入、RAG 检索等
|
||||||
# remark:并不涉及消息的持久化,只涉及发送给 LLM 前,上下文的封装
|
# remark:并不涉及消息的持久化,只涉及发送给 LLM 前,上下文的封装
|
||||||
form_data, metadata, events = await process_chat_payload(
|
form_data, metadata, events = await process_chat_payload(
|
||||||
|
|
@ -1661,7 +1731,7 @@ async def chat_completion(
|
||||||
|
|
||||||
# 8.6 异常处理:记录错误到数据库并通知前端
|
# 8.6 异常处理:记录错误到数据库并通知前端
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.debug(f"Error processing chat payload: {e}")
|
log.exception(f"Error processing chat payload: {e}")
|
||||||
if metadata.get("chat_id") and metadata.get("message_id"):
|
if metadata.get("chat_id") and metadata.get("message_id"):
|
||||||
try:
|
try:
|
||||||
# 将错误信息保存到消息记录
|
# 将错误信息保存到消息记录
|
||||||
|
|
|
||||||
|
|
@ -16,6 +16,7 @@ def last_process_payload(
|
||||||
messages (List[Dict]): 该用户在该对话下的聊天消息列表,
|
messages (List[Dict]): 该用户在该对话下的聊天消息列表,
|
||||||
形如 {"role": "system|user|assistant", "content": "...", "timestamp": 0}。
|
形如 {"role": "system|user|assistant", "content": "...", "timestamp": 0}。
|
||||||
"""
|
"""
|
||||||
print("user_id:", user_id)
|
return
|
||||||
print("session_id:", session_id)
|
# print("user_id:", user_id)
|
||||||
print("messages:", messages)
|
# print("session_id:", session_id)
|
||||||
|
# print("messages:", messages)
|
||||||
|
|
|
||||||
|
|
@ -252,6 +252,62 @@ class ChatTable:
|
||||||
|
|
||||||
return chat.chat.get("history", {}).get("messages", {}).get(message_id, {})
|
return chat.chat.get("history", {}).get("messages", {}).get(message_id, {})
|
||||||
|
|
||||||
|
def get_summary_by_user_id_and_chat_id(
    self, user_id: str, chat_id: str
) -> Optional[dict]:
    """
    Read ``chat.meta.summary`` for the given user/chat pair.

    Returns the summary dict (content plus boundary info such as
    ``last_message_id``/``last_timestamp``), or ``None`` when the chat
    does not exist or carries no summary.
    """
    record = self.get_chat_by_id_and_user_id(chat_id, user_id)
    if record is None:
        return None

    meta = record.meta
    # Legacy rows may not store meta as a dict; treat that as "no summary".
    if not isinstance(meta, dict):
        return None
    return meta.get("summary", None)
|
||||||
|
|
||||||
|
def set_summary_by_user_id_and_chat_id(
    self,
    user_id: str,
    chat_id: str,
    summary: str,
    last_message_id: Optional[str],
    last_timestamp: Optional[int],
    recent_message_ids: Optional[list[str]] = None,
) -> Optional[ChatModel]:
    """
    Write ``chat.meta.summary`` and bump the chat's ``updated_at``.

    Args:
        user_id: owner of the chat.
        chat_id: chat to update.
        summary: summary text to store.
        last_message_id: id of the last message covered by the summary.
        last_timestamp: timestamp of that boundary message.
        recent_message_ids: optional cold-start message ids; only written
            when provided (``None`` leaves any existing value untouched).

    Returns:
        The refreshed ``ChatModel`` on success, ``None`` when the chat is
        missing or the write fails.
    """
    try:
        with get_db() as db:
            row = db.query(Chat).filter_by(id=chat_id, user_id=user_id).first()
            if row is None:
                return None

            base_meta = row.meta if isinstance(row.meta, dict) else {}
            updated_meta = {
                **base_meta,
                "summary": {
                    "content": summary,
                    "last_message_id": last_message_id,
                    "last_timestamp": last_timestamp,
                },
            }
            if recent_message_ids is not None:
                updated_meta["recent_message_id_for_cold_start"] = recent_message_ids

            # Re-assign the whole dict so SQLAlchemy change detection fires.
            row.meta = updated_meta
            row.updated_at = int(time.time())
            db.commit()
            db.refresh(row)
            return ChatModel.model_validate(row)
    except Exception as e:
        log.exception(f"set_summary_by_user_id_and_chat_id failed: {e}")
        return None
|
||||||
|
|
||||||
def upsert_message_to_chat_by_id_and_message_id(
|
def upsert_message_to_chat_by_id_and_message_id(
|
||||||
self, id: str, message_id: str, message: dict
|
self, id: str, message_id: str, message: dict
|
||||||
) -> Optional[ChatModel]:
|
) -> Optional[ChatModel]:
|
||||||
|
|
|
||||||
|
|
@ -1004,12 +1004,6 @@ async def generate_chat_completion(
|
||||||
log.debug(
|
log.debug(
|
||||||
f"chatting_completion hook user={user.id} chat_id={metadata.get('chat_id')} model={payload.get('model')}"
|
f"chatting_completion hook user={user.id} chat_id={metadata.get('chat_id')} model={payload.get('model')}"
|
||||||
)
|
)
|
||||||
|
|
||||||
last_process_payload(
|
|
||||||
user_id = user.id,
|
|
||||||
session_id = metadata.get("chat_id"),
|
|
||||||
messages = extract_timestamped_messages(payload.get("messages", [])),
|
|
||||||
)
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
log.debug(f"chatting_completion 钩子执行失败: {e}")
|
log.debug(f"chatting_completion 钩子执行失败: {e}")
|
||||||
|
|
||||||
|
|
|
||||||
File diff suppressed because it is too large
Load diff
172
backend/open_webui/utils/summary.py
Normal file
172
backend/open_webui/utils/summary.py
Normal file
|
|
@ -0,0 +1,172 @@
|
||||||
|
from typing import Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
from open_webui.models.chats import Chats
|
||||||
|
|
||||||
|
|
||||||
|
def build_ordered_messages(
    messages_map: Optional[Dict], anchor_id: Optional[str] = None
) -> List[Dict]:
    """
    Rebuild an ordered message list from a message map.

    Strategy:
        1. Preferred: follow the ``parentId`` chain upwards from
           ``anchor_id`` to the root message.
        2. Fallback: sort by timestamp (used when there is no
           ``anchor_id`` or it is not in the map).

    Args:
        messages_map: message map shaped like
            ``{"msg-id": {"role": "user", "content": "...", "parentId": "...", "timestamp": 123456}}``
        anchor_id: anchor message id (chain tail) to trace back from.

    Returns:
        Ordered message list; every entry carries an ``id`` field.
    """
    if not messages_map:
        return []

    def with_id(message_id: str, message: Dict) -> Dict:
        # Inject the map key as ``id`` unless the message already has one.
        return {**message, **({"id": message_id} if "id" not in message else {})}

    # Mode 1: trace the parentId chain.
    if anchor_id and anchor_id in messages_map:
        ordered: List[Dict] = []
        seen: set = set()
        current_id: Optional[str] = anchor_id
        while current_id:
            # FIX(review): a corrupted parentId cycle would loop forever;
            # stop as soon as an id repeats.
            if current_id in seen:
                break
            seen.add(current_id)
            current_msg = messages_map.get(current_id)
            if not current_msg:
                break
            ordered.insert(0, with_id(current_id, current_msg))
            current_id = current_msg.get("parentId")

        return ordered

    # Mode 2: timestamp ordering (several legacy key spellings supported).
    sortable: List[Tuple[int, str, Dict]] = []
    for mid, message in messages_map.items():
        ts = (
            message.get("createdAt")
            or message.get("created_at")
            or message.get("timestamp")
            or 0
        )
        sortable.append((int(ts), mid, message))

    sortable.sort(key=lambda x: x[0])
    return [with_id(mid, msg) for _, mid, msg in sortable]
|
||||||
|
|
||||||
|
|
||||||
|
def get_recent_messages_by_user_id(user_id: str, chat_id: str, num: int) -> List[Dict]:
    """
    Return the user's most recent ``num`` messages across all of their
    chats, ordered by timestamp (oldest first).

    Args:
        user_id: user id.
        chat_id: current chat id. NOTE(review): accepted but currently
            unused — messages are collected across all chats; confirm
            whether the current chat should be prioritized.
        num: number of messages to return (``<= 0`` returns everything).

    Returns:
        Ordered message list (the most recent ``num`` entries).
    """
    collected: List[Dict] = []

    # Walk every chat the user owns, archived ones included.
    for chat in Chats.get_chat_list_by_user_id(user_id, include_archived=True):
        history_messages = chat.chat.get("history", {}).get("messages", {}) or {}
        for message_id, message in history_messages.items():
            # Skip messages with empty content.
            if message.get("content", "") == "":
                continue
            ts = (
                message.get("createdAt")
                or message.get("created_at")
                or message.get("timestamp")
                or 0
            )
            record = {**message, "id": message_id}
            record.setdefault("chat_id", chat.id)
            record.setdefault("timestamp", int(ts))
            collected.append(record)

    # Oldest first.
    collected.sort(key=lambda m: m.get("timestamp", 0))

    return collected if num <= 0 else collected[-num:]
|
||||||
|
|
||||||
|
|
||||||
|
def slice_messages_with_summary(
    messages_map: Dict,
    boundary_message_id: Optional[str],
    anchor_id: Optional[str],
    pre_boundary: int = 20,
) -> List[Dict]:
    """
    Trim the message list around the summary boundary.

    Keeps ``pre_boundary`` messages before the boundary (context) plus
    every message after it (the latest conversation), reducing token
    usage while retaining enough context.

    Args:
        messages_map: message map.
        boundary_message_id: summary boundary message id (``None``
            returns the full list).
        anchor_id: anchor message id (chain tail).
        pre_boundary: number of messages kept before the boundary
            (default 20).

    Returns:
        The trimmed, ordered message list.

    Example:
        100 messages, boundary at index 50, ``pre_boundary=20`` →
        returns messages 29-99 (71 messages).
    """
    ordered = build_ordered_messages(messages_map, anchor_id)

    if not boundary_message_id:
        return ordered

    # Locate the boundary message; a missing boundary keeps the full list.
    boundary_idx = None
    for idx, msg in enumerate(ordered):
        if msg.get("id") == boundary_message_id:
            boundary_idx = idx
            break

    if boundary_idx is None:
        return ordered

    return ordered[max(boundary_idx - pre_boundary, 0):]
|
||||||
|
|
||||||
|
|
||||||
|
def summarize(messages: List[Dict], old_summary: Optional[str] = None) -> str:
    """
    Generate a conversation summary (placeholder implementation).

    Args:
        messages: messages to summarize.
        old_summary: previous summary (currently unused).

    Returns:
        Summary string: the first 100 characters of each message's
        content, one per line.

    TODO:
        - Implement incremental summarization based on ``old_summary``.
        - Make the summary strategy configurable (length, verbosity).
    """
    # FIX(review): ``m.get("content")`` may be None (or the key missing),
    # which made the original ``[:100]`` slice raise TypeError; coerce to a
    # string first so malformed messages degrade to an empty line.
    return "\n".join(str(m.get("content") or "")[:100] for m in messages)
|
||||||
|
|
||||||
|
def compute_token_count(messages: List[Dict]) -> int:
    """
    Estimate the token count of a message list (placeholder).

    Current heuristic: 4 characters ≈ 1 token (rough estimate).

    TODO: plug in a real tokenizer (e.g. tiktoken for OpenAI models).
    """
    # FIX(review): the original indexed ``msg['content']`` directly, raising
    # KeyError on missing content and TypeError on None; coerce defensively.
    total_chars = sum(len(str(msg.get("content") or "")) for msg in messages)
    return max(total_chars // 4, 0)
|
||||||
|
|
||||||
|
|
@ -1998,11 +1998,7 @@ const getCombinedModelById = (modelId) => {
|
||||||
const isUserModel = combinedModel?.source === 'user';
|
const isUserModel = combinedModel?.source === 'user';
|
||||||
const credential = combinedModel?.credential;
|
const credential = combinedModel?.credential;
|
||||||
|
|
||||||
const stream =
|
const stream = true;
|
||||||
model?.info?.params?.stream_response ??
|
|
||||||
$settings?.params?.stream_response ??
|
|
||||||
params?.stream_response ??
|
|
||||||
true;
|
|
||||||
|
|
||||||
let messages = [
|
let messages = [
|
||||||
params?.system || $settings.system
|
params?.system || $settings.system
|
||||||
|
|
|
||||||
|
|
@ -1021,19 +1021,19 @@
|
||||||
bind:folderRegistry
|
bind:folderRegistry
|
||||||
{folders}
|
{folders}
|
||||||
{shiftKey}
|
{shiftKey}
|
||||||
onDelete={(folderId) => {
|
onDelete={async (folderId) => {
|
||||||
selectedFolder.set(null);
|
selectedFolder.set(null);
|
||||||
initChatList();
|
await initChatList();
|
||||||
}}
|
}}
|
||||||
on:update={() => {
|
on:update={async () => {
|
||||||
initChatList();
|
await initChatList();
|
||||||
}}
|
}}
|
||||||
on:import={(e) => {
|
on:import={(e) => {
|
||||||
const { folderId, items } = e.detail;
|
const { folderId, items } = e.detail;
|
||||||
importChatHandler(items, false, folderId);
|
importChatHandler(items, false, folderId);
|
||||||
}}
|
}}
|
||||||
on:change={async () => {
|
on:change={async () => {
|
||||||
initChatList();
|
await initChatList();
|
||||||
}}
|
}}
|
||||||
/>
|
/>
|
||||||
</Folder>
|
</Folder>
|
||||||
|
|
@ -1085,7 +1085,7 @@
|
||||||
const res = await toggleChatPinnedStatusById(localStorage.token, chat.id);
|
const res = await toggleChatPinnedStatusById(localStorage.token, chat.id);
|
||||||
}
|
}
|
||||||
|
|
||||||
initChatList();
|
await initChatList();
|
||||||
}
|
}
|
||||||
} else if (type === 'folder') {
|
} else if (type === 'folder') {
|
||||||
if (folders[id].parent_id === null) {
|
if (folders[id].parent_id === null) {
|
||||||
|
|
@ -1154,7 +1154,7 @@
|
||||||
const res = await toggleChatPinnedStatusById(localStorage.token, chat.id);
|
const res = await toggleChatPinnedStatusById(localStorage.token, chat.id);
|
||||||
}
|
}
|
||||||
|
|
||||||
initChatList();
|
await initChatList();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
|
|
@ -1177,7 +1177,7 @@
|
||||||
selectedChatId = null;
|
selectedChatId = null;
|
||||||
}}
|
}}
|
||||||
on:change={async () => {
|
on:change={async () => {
|
||||||
initChatList();
|
await initChatList();
|
||||||
}}
|
}}
|
||||||
on:tag={(e) => {
|
on:tag={(e) => {
|
||||||
const { type, name } = e.detail;
|
const { type, name } = e.detail;
|
||||||
|
|
@ -1237,7 +1237,7 @@
|
||||||
selectedChatId = null;
|
selectedChatId = null;
|
||||||
}}
|
}}
|
||||||
on:change={async () => {
|
on:change={async () => {
|
||||||
initChatList();
|
await initChatList();
|
||||||
}}
|
}}
|
||||||
on:tag={(e) => {
|
on:tag={(e) => {
|
||||||
const { type, name } = e.detail;
|
const { type, name } = e.detail;
|
||||||
|
|
|
||||||
Loading…
Reference in a new issue