Mirror of https://github.com/open-webui/open-webui.git, synced 2026-01-03 23:25:21 +00:00
Compare commits
326 commits
a7271532f8 5088583721 0a78323406 423983f5a7 4ab917c74b 7f0ca8c4b7
5c5f87a63c def8a0027d d95f533214 de97f797ce f0a04427e2 088f62f1e1
cfa8b3a396 a8bb000616 837f7022d5 67fd2ec759 baf0f51827 f7d5750210
446cc0ac60 01e88c6ac2 45e3237756 bd7f1869ce f1bf4f20c5 a3458f492c
96ec66a89c a1155dfeb7 fa924946b0 b35aeb8f46 60c93b4ccc ef43e81f9a
f826d3ed75 b3904b6ecb c96549eaa7 7746e9f4b8 4fd790f7dd ae203d8952
48ccb1e170 5077676d33 0dd2cfe1f2 53c1ca64b7 870e29e373 7e4848e10b
6993b0b40b 360289db21 0eeda79048 8eddff83cb 28b2fcab0c 0b0962a2ae
90c0c5da00 fe653a1336 4c2e5c93e9 d4bd938a77 37085ed42b 2e7c7d635d
b0d8372d31 afaa404fe4 8890555dab b5bd8704fe 823b9a6dd9 d65116282c
e61724d2b1 cd170735c5 3d5aaa9ead 149bb9dae2 ed8bb028a0 ffbd774d32
05a3aae59c f509f5542d 0eb33e8e12 59d6eb2bad 7bd43f00b1 cad4c61a25
dce52f33c0 0413c973e7 bc681f8258 195801f74a 07d28ad781 8f7f5db1fd
06038cd73d e11c6dca75 a1bde74e97 1c62be4406 67290e1c2a 64dbc71643
90209a30a8 18e6cfb1fd 8e661a4e73 379f888c9d 4c4b9d19a1 7364b67455
3418f53d07 3b3e12b43a 4d9a51ba33 4b4241273d db95e96688 99c820d607
282c541427 b364cf43d3 b9676cf36f 258caaeced 6e99b10163 a2a9a9bcf4
0addc1ea46 6812d3b9d1 ceae3d48e6 3ed1df2e53 68219d84a9 6068e23590
d7467a86e2 d098c57d4d 693636d971 a6ef82c5ed 79cfe29bb2 d1d42128e5
2bccf8350d c15201620d f31ca75892 a7993f6f4e ae47101dc6 cf6a1300ca
a934dc997e ed2db0d04b 4ecacda28c 94a8439105 7b0b16ebbd 49d54c5821
0eafc09965 6a75620fcb 205c711120 3af96c9d4e 6e0badde67 b29e7fd0be
02df867843 00c2b6ca40 65d4b22c7c a4fe823893 103ff0c5e4 4363df175d
307b37d5e2 9b24cddef6 1ea555a5ac c24b1207a0 44e41806f2 2b1a29d44b
f5fbbaf060 ba158d378f b02397e460 bcd50ed8f1 8cea0cf746 ce945a9334
3c8f1cf8e5 4d4ed743ae aa9c0389c3 492c8bac09 0617ede0eb 7e6b8a9a71
dcdbafa5ae cdd75ade50 508fde70ff 2a8a2f1ba3 d90d036d23 2f68ac33b3
01284b92db 1779090bdb d27e019af3 5c2df97f04 6fe737bf8f 7b126b23d5
22f1b764a7 39f778c275 54b7ec56d6 133618aaf0 b560775713 964333ed65
85b1e21765 c86fcb75b7 6f1486ffd0 73f7e91dec 8361f73ca6 11efb982c1
9d87688ecc 4f9677ffcf a49e1d87ad 9a65ed2260 864d54095f b29fdc2a0c
12f237ff80 192c2af7ba 17bfd38696 a7e614ca4c e5c6b739c2 34169b3581
01868e856a e301d1962e 9f6c91987f d19023288e 29236aefe8 6ce9afd95d
39f7575b64 954aaa6bdc aa589fcbd9 9f42b9369f 143d3fbce2 6e531679f4
562f22960c 5388cc1bc6 0a14196afb 7b16637043 734c04ebf0 4f50571b53
52ccab8fc0 f5e8d4d5a0 51621ba91a dba86bc980 21f3411692 91473c788c
25f0c26b25 9791c9bd8b c62609faba 88decab9be d499c3aed8 277f3a91f1
1818f2b3d9 a0826ec9fe 3c846617cd 39645102d1 3f1d9ccbf8 781aeebd2a
f589b7c189 696f356881 515f85fe1c 4d74e6cefa 3ebb3e2143 69b82edd63
9d39b9b42c e65d92fc6f f3c8c7045d c9185aaf44 05e79bdd0c fb6b18faef
b56adf01e3 356e982d30 bb4b547574 20340c3e4e 6c53bf7175 ff121413da
c1d760692f a7c7993bbf 0f3156651c c8071a3180 25994dd3da b9e849f17d
80fbb29ccc 7b1895ec8a aae2fce173 451907cc92 1b095d12ff 0518749d51
fc06c16dd4 33b59adf27 70948f8803 c2634d45ad 8ef482a52a dcf50c4758
742832a850 0a4358c3d1 369298a83e b99c9b277a 4b6773885c d232e433e8
848f3fd4d8 453ea9b9a1 6ee50770cd 15dc607779 32c888c280 99a7823e01
022f9ff3a5 ad86707605 289801b608 6bb204eb80 560702a8f7 6752772c1d
d645cdbaf3 3b4d7d568b d5d0e72590 f1a7de94ba acccb9afdd f2c56fc839
dd6b808e69 7a374ca2a5 421aba7cd7 09b6ea38c5 28659f6af5 64b4d5d9c2
c7a48c50a3 b5e5617a41 ff4b1b9824 86cdcda29a 5a32ea9b49 457af65df6
04b337323a 384753c6ca 3fe5a47050 d1bbf6ba92 9f89cc5adc fa0efae4d5
f2d6a425de d071cdf7d4 4b21704498 9fca4969db 4370dee79e c631659327
4df5b7eb2e 8b2015a97b 477097c2e4 c5b73d7184 c7eb713689 1bfe2c92ba
69722ba973 140605e660 f3547568e4 15c6860a49 363ef194d8 33a52628e6
35ab6b7667 97ba5b8436
372 changed files with 18057 additions and 6390 deletions
3 .github/workflows/deploy-to-hf-spaces.yml (vendored)
@@ -57,7 +57,8 @@ jobs:
git lfs install
git lfs track "*.ttf"
git lfs track "*.jpg"
rm demo.gif
rm demo.png
rm banner.png
git add .
git commit -m "GitHub deploy: ${{ github.sha }}"
git push --force https://open-webui:${HF_TOKEN}@huggingface.co/spaces/open-webui/open-webui main
@@ -3,8 +3,6 @@ pnpm-lock.yaml
package-lock.json
yarn.lock

kubernetes/

# Copy of .gitignore
.DS_Store
node_modules
194 CHANGELOG.md

@@ -5,7 +5,199 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

### [0.6.39] - 2025-11-25

## [0.6.43] - 2025-12-22

### Fixed

- 🐍 **Python dependency installation issues** were resolved by correcting pip dependency handling, preventing installation failures in certain environments and improving setup reliability. [Commit](https://github.com/open-webui/open-webui/commit/5c5f87a)
- 🎙️ **Speech-to-Text default content type handling** was fixed and refactored to ensure correct MIME type usage, improving compatibility across STT providers and preventing transcription errors caused by incorrect defaults. [Commit](https://github.com/open-webui/open-webui/commit/4ab917c)
- 🖼️ **Temporary chat image handling** was fixed and refactored, ensuring images generated or edited in temporary chats are correctly processed, stored, and displayed without inconsistencies or missing references. [Commit](https://github.com/open-webui/open-webui/commit/423983f)
- 🎨 **Image action button fixed**, restoring the ability to trigger image generation, editing, and related image actions from the chat UI. [Commit](https://github.com/open-webui/open-webui/commit/def8a00)

## [0.6.42] - 2025-12-21

### Added

- 📚 Knowledge base file management was overhauled with server-side pagination loading 30 files at a time instead of loading entire collections at once, dramatically improving performance and responsiveness for large knowledge bases with hundreds or thousands of files, reducing initial load times and memory usage while adding server-side search and filtering, view options for files added by the user versus shared files, customizable sorting by name or date, and file authorship tracking with upload timestamps. [Commit](https://github.com/open-webui/open-webui/commit/94a8439105f30203ea9d729787c9c5978f5c22a2)
- ✨ Knowledge base file management was enhanced with automatic list refresh after file operations ensuring immediate UI updates, improved permission validation at the model layer, and automatic channel-file association for files uploaded with channel metadata. [Commit](https://github.com/open-webui/open-webui/commit/c15201620d03a9b60b800a34d8dc3426722c5b8b)
- 🔎 Knowledge command in chat input now uses server-side search for massive performance increases when selecting knowledge bases and files. [Commit](https://github.com/open-webui/open-webui/commit/0addc1ea461d7b4eee8fe0ca2fedd615b3988b0e)
- 🗂️ Knowledge workspace listing now uses server-side pagination loading 30 collections at a time with new search endpoints supporting query filtering and view options for created versus shared collections. [Commit](https://github.com/open-webui/open-webui/commit/ceae3d48e603f53313d5483abe94099e20e914e8)
- 📖 Knowledge workspace now displays all collections with read access including shared read-only collections, enabling users to discover and explore knowledge bases they don't own while maintaining proper access controls through visual "Read Only" badges and automatically disabled editing controls for name, description, file uploads, content editing, and deletion operations. [Commit](https://github.com/open-webui/open-webui/commit/693636d971d0e8398fa0c9ec3897686750007af5)
- 📁 Bulk website and YouTube video attachment now supports adding multiple URLs at once (newline-separated) with automatic YouTube detection and transcript retrieval, processed sequentially to prevent resource strain, and both websites and videos can now be added directly to knowledge bases through the workspace UI. [Commit](https://github.com/open-webui/open-webui/commit/7746e9f4b831f09953ad2b659b96e0fd52911031), [#6202](https://github.com/open-webui/open-webui/issues/6202), [#19587](https://github.com/open-webui/open-webui/pull/19587), [#8231](https://github.com/open-webui/open-webui/pull/8231)
- 🪟 Sidebar width is now resizable on desktop devices with persistent storage in localStorage, enforcing minimum and maximum width constraints (220px to 480px) while all layout components now reference the dynamic sidebar width via CSS variables for consistent responsive behavior. [Commit](https://github.com/open-webui/open-webui/commit/b364cf43d3e8fd3557f65f17bc285bfaca5ed368)
- 📝 Notes feature now supports server-side search and filtering with view options for notes created by the user versus notes shared with them, customizable sorting by name or date in both list and grid view modes within a redesigned interface featuring consolidated note management controls in a unified header, group-based permission sharing with read, write, and read-only access control displaying note authorship and sharing status for better collaboration, and paginated infinite scroll for improved performance with large note collections. [Commit](https://github.com/open-webui/open-webui/commit/9b24cddef6c4862bd899eb8d6332cafff54e871d)
- 👁️ Notes now support read-only access permissions, allowing users to share notes for viewing without granting edit rights, with the editor automatically becoming non-editable and appropriate UI indicators when read-only access is detected. [Commit](https://github.com/open-webui/open-webui/commit/4363df175d50e0f9729381ac2ba9b37a3c3a966d)
- 📄 Notes can now be created directly from the chat input field, allowing users to save drafted messages or content as notes without navigation or retyping. [Commit](https://github.com/open-webui/open-webui/commit/00c2b6ca405d617e3d7520953a00a36c19c790ec)
- 🪟 Sidebar folders, channels, and pinned models sections now automatically expand when creating new items or pinning models, providing immediate visual feedback for user actions. [Commit](https://github.com/open-webui/open-webui/commit/f826d3ed75213a0a1b31b50d030bfb1d5e91d199), [#19929](https://github.com/open-webui/open-webui/pull/19929)
- 📋 Chat file associations are now properly tracked in the database through a new "chat_file" table, enabling accurate file management across chats and ensuring proper cleanup of files when chats are deleted, while improving database consistency in multi-node deployments. [Commit](https://github.com/open-webui/open-webui/commit/f1bf4f20c53e6493f0eb6fa2f12cb84c2d22da52)
- 🖼️ User-uploaded images are now automatically converted from base64 to actual file storage on the server, eliminating large inline base64 strings from being stored in chat history and reducing message payload sizes while enabling better image management and sharing across multiple chats. [Commit](https://github.com/open-webui/open-webui/commit/f1bf4f20c53e6493f0eb6fa2f12cb84c2d22da52)
- 📸 Shared chats with generated or edited images now correctly display images when accessed by other users by properly linking generated images to their chat and message through the chat_file table, ensuring images remain accessible in shared chat links. [Commit](https://github.com/open-webui/open-webui/commit/446cc0ac6063402a743e949f50612376ed5a8437), [#19393](https://github.com/open-webui/open-webui/issues/19393)
- 📊 File viewer modal was significantly enhanced with native-like viewers for Excel/CSV spreadsheets rendering as interactive scrollable tables with multi-sheet navigation support, Markdown documents displaying with full typography including headers, lists, links, and tables, and source code files showing syntax highlighting, all accessible through a tabbed interface defaulting to raw text view. [#20035](https://github.com/open-webui/open-webui/pull/20035), [#2867](https://github.com/open-webui/open-webui/issues/2867)
- 📏 Chat input now displays an expand button in the top-right corner when messages exceed two lines, providing optional access to a full-screen editor for composing longer messages with enhanced workspace and visibility while temporarily disabling the main input to prevent editing conflicts. [Commit](https://github.com/open-webui/open-webui/commit/205c7111200c22da42e9b5fe1e676aec9cca6daa)
- 💬 Channel message data lazy loading was implemented, deferring attachment and file metadata retrieval until needed to improve initial message list load performance. [Commit](https://github.com/open-webui/open-webui/commit/54b7ec56d6bcd2d79addc1694b757dab18cf18c5)
- 🖼️ Channel image upload handling was optimized to process and store compressed images directly as files rather than inline data, improving memory efficiency and message load times. [Commit](https://github.com/open-webui/open-webui/commit/22f1b764a7ea1add0a896906a9ef00b4b6743adc)
- 🎥 Video file playback support was added to channel messages, enabling inline video viewing with native player controls. [Commit](https://github.com/open-webui/open-webui/commit/7b126b23d50a0bd36a350fe09dc1dbe3df105318)
- 🔐 LDAP authentication now supports user entries with multiple username attributes, correctly handling cases where the username field contains a list of values. [Commit](https://github.com/open-webui/open-webui/commit/379f888c9dc6dce21c3ef7a1fc455258aff993dc), [#19878](https://github.com/open-webui/open-webui/issues/19878)
- 👨👩👧👦 The "ENABLE_PUBLIC_ACTIVE_USERS_COUNT" environment variable now allows restricting active user count visibility to administrators, reducing backend load and addressing privacy concerns in large deployments. [#20027](https://github.com/open-webui/open-webui/pull/20027), [#13026](https://github.com/open-webui/open-webui/issues/13026)
- 🚀 Models page search input performance was optimized with a 300ms debounce to reduce server load and improve responsiveness. [#19832](https://github.com/open-webui/open-webui/pull/19832)
- 💨 Frontend performance was optimized by preventing unnecessary API calls for API Keys and Channels features when they are disabled in admin settings, reducing backend noise and improving overall system efficiency. [#20043](https://github.com/open-webui/open-webui/pull/20043), [#19967](https://github.com/open-webui/open-webui/issues/19967)
- 📎 Channel file association tracking was implemented, automatically linking uploaded files to their respective channels with a dedicated association table enabling better organization and future file management features within channels. [Commit](https://github.com/open-webui/open-webui/commit/2bccf8350d0915f69b8020934bb179c52e81b7b5)
- 👥 User profile previews now display group membership information for easier identification of user roles and permissions. [Commit](https://github.com/open-webui/open-webui/commit/2b1a29d44bde9fbc20ff9f0a5ded1ce8ded9d90d)
- 🌍 The "SEARXNG_LANGUAGE" environment variable now allows configuring search language for SearXNG queries, replacing the hardcoded "en-US" default with a configurable setting that defaults to "all". [#19909](https://github.com/open-webui/open-webui/pull/19909)
- ⏳ The "MINERU_API_TIMEOUT" environment variable now allows configuring request timeouts for MinerU document processing operations. [#20016](https://github.com/open-webui/open-webui/pull/20016), [#18495](https://github.com/open-webui/open-webui/issues/18495)
- 🔧 The "RAG_EXTERNAL_RERANKER_TIMEOUT" environment variable now allows configuring request timeouts for external reranker operations. [#20049](https://github.com/open-webui/open-webui/pull/20049), [#19900](https://github.com/open-webui/open-webui/issues/19900)
- 🎨 OpenAI GPT-IMAGE 1.5 model support was added for image generation and editing with automatic image size capabilities. [Commit](https://github.com/open-webui/open-webui/commit/4c2e5c93e9287479f56f780708656136849ccaee)
- 🔑 The "OAUTH_AUDIENCE" environment variable now allows OAuth providers to specify audience parameters for JWT access token generation. [#19768](https://github.com/open-webui/open-webui/pull/19768)
- ⏰ The "REDIS_SOCKET_CONNECT_TIMEOUT" environment variable now allows configuring socket connection timeouts for Redis and Sentinel connections, addressing potential failover and responsiveness issues in distributed deployments. [#19799](https://github.com/open-webui/open-webui/pull/19799), [Docs:#882](https://github.com/open-webui/docs/pull/882)
- ⏱️ The "WEB_LOADER_TIMEOUT" environment variable now allows configuring request timeouts for SafeWebBaseLoader operations. [#19804](https://github.com/open-webui/open-webui/pull/19804), [#19734](https://github.com/open-webui/open-webui/issues/19734)
- 🚀 Models API endpoint performance was optimized through batched model loading, eliminating N+1 queries and significantly reducing response times when filtering models by user permissions. [Commit](https://github.com/open-webui/open-webui/commit/0dd2cfe1f273fbacdbe90300a97c021f2e678656)
- 🔀 Custom model fallback handling was added, allowing workspace-created custom models to automatically fall back to the default chat model when their configured base model is not found; set "ENABLE_CUSTOM_MODEL_FALLBACK" to true to enable, preventing workflow disruption when base models are removed or renamed, while ensuring other requests remain unaffected. [Commit](https://github.com/open-webui/open-webui/commit/b35aeb8f46e0e278c6f4538382c2b6838e24cc5a), [#19985](https://github.com/open-webui/open-webui/pull/19985)
- 📡 A new /feedbacks/all/ids API endpoint was added to return only feedback IDs without metadata, significantly improving performance for external integrations working with large feedback collections. [Commit](https://github.com/open-webui/open-webui/commit/53c1ca64b7205d85f6de06bd69e3e265d15546b8)
- 📈 An experimental chat usage statistics endpoint (GET /api/v1/chats/stats/usage) was added with pagination support (50 chats per page) and comprehensive per-chat analytics including model usage counts, user and assistant message breakdowns, average response times calculated from message timestamps, average content lengths, and last activity timestamps; this endpoint remains experimental and not suitable for production use as it performs intensive calculations by processing entire message histories for each chat without caching. [Commit](https://github.com/open-webui/open-webui/commit/a7993f6f4e4591cd2aaa4718ece9e5623557d019) (a usage sketch follows this list)
- 🔄 Various improvements were implemented across the frontend and backend to enhance performance, stability, and security.
- 🌐 Translations for German, Danish, Finnish, Korean, Portuguese (Brazil), Simplified Chinese, Traditional Chinese, Catalan, and Spanish were enhanced and expanded.
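For integrators, the experimental usage-statistics endpoint above can be exercised with a few lines of Python. This is a minimal sketch, not official client code: the base URL and Bearer-token authentication are assumptions about a typical deployment, and the `page` query parameter name is an assumption inferred from the endpoint's described pagination.

```python
# Minimal sketch: paging through the experimental chat usage statistics
# endpoint described above. BASE_URL, TOKEN, and the "page" parameter
# name are assumptions, not confirmed details of the API.
import requests

BASE_URL = "http://localhost:8080"  # assumed instance URL
TOKEN = "sk-..."                    # assumed API key

def fetch_usage_stats(page: int = 1) -> dict:
    """Fetch one page (50 chats) of per-chat usage statistics."""
    response = requests.get(
        f"{BASE_URL}/api/v1/chats/stats/usage",
        headers={"Authorization": f"Bearer {TOKEN}"},
        params={"page": page},  # parameter name is an assumption
        timeout=60,  # the endpoint is experimental and can be slow
    )
    response.raise_for_status()
    return response.json()

if __name__ == "__main__":
    print(fetch_usage_stats(page=1))
```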

### Fixed

- ⚡ External reranker operations were optimized to prevent event loop blocking by offloading synchronous HTTP requests to a thread pool using asyncio.to_thread(), eliminating application freezes during RAG reranking queries. [#20049](https://github.com/open-webui/open-webui/pull/20049), [#19900](https://github.com/open-webui/open-webui/issues/19900)
- 💭 Text loss in the explanation feature when using the "CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE" environment variable was resolved by correcting newline handling in streaming responses. [#19829](https://github.com/open-webui/open-webui/pull/19829)
- 📚 Knowledge base batch file addition failures caused by Pydantic validation errors are now prevented by making the meta field optional in file metadata responses, allowing files without metadata to be processed correctly. [#20022](https://github.com/open-webui/open-webui/pull/20022), [#14220](https://github.com/open-webui/open-webui/issues/14220)
- 🗄️ PostgreSQL null byte insertion failures when attaching web pages or processing embedded content are now prevented by consolidating text sanitization logic across chat messages, web search results, and knowledge base documents, removing null bytes and invalid UTF-8 surrogates before database insertion. [#20072](https://github.com/open-webui/open-webui/pull/20072), [#19867](https://github.com/open-webui/open-webui/issues/19867), [#18201](https://github.com/open-webui/open-webui/issues/18201), [#15616](https://github.com/open-webui/open-webui/issues/15616)
- 🎫 MCP OAuth 2.1 token exchange failures are now fixed by removing duplicate credential passing that caused "ID1,ID1" concatenation and 401 errors from the token endpoint. [#20076](https://github.com/open-webui/open-webui/pull/20076), [#19823](https://github.com/open-webui/open-webui/issues/19823)
- 📝 Notes "Improve" action now works correctly after the streaming API change in v0.6.41 by ensuring uploaded files are fully retrieved with complete metadata before processing, restoring note improvement and summarization functionality. [Commit](https://github.com/open-webui/open-webui/commit/a3458f492c53a3b00405f59fbe1ea953fe364f18), [#20078](https://github.com/open-webui/open-webui/discussions/20078)
- 🔑 MCP OAuth 2.1 tool servers now work correctly in multi-node deployments through lazy-loading of OAuth clients from Redis-synced configuration, eliminating 404 errors when load balancers route requests to nodes that didn't process the original config update. [#20076](https://github.com/open-webui/open-webui/pull/20076), [#19902](https://github.com/open-webui/open-webui/pull/19902), [#19901](https://github.com/open-webui/open-webui/issues/19901)
- 🧩 Chat loading failures when channels permissions were disabled are now prevented through graceful error handling. [Commit](https://github.com/open-webui/open-webui/commit/5c2df97f04cce5cb7087d288f816f91a739688c1)
- 🔍 Search bar freezing and crashing issues in Models, Chat, and Archived Chat pages caused by excessively long queries exceeding server URL limits were resolved by truncating queries to 500 characters, and knowledge base layout shifting with long names was fixed by adjusting flex container properties. [#19832](https://github.com/open-webui/open-webui/pull/19832)
- 🎛️ Rate limiting errors (HTTP 429) with Brave Search free tier when generating multiple queries are now prevented through asyncio.Semaphore-based concurrency control applied globally to all search engines. [#20070](https://github.com/open-webui/open-webui/pull/20070), [#20003](https://github.com/open-webui/open-webui/issues/20003), [#14107](https://github.com/open-webui/open-webui/issues/14107), [#15134](https://github.com/open-webui/open-webui/issues/15134) (a sketch of this pattern follows this list)
- 💥 UI crashes and white screen errors caused by null chat lists during loading or network failures were prevented by adding null safety checks to chat iteration in folder placeholders and archived chat modals. [#19898](https://github.com/open-webui/open-webui/pull/19898)
- 🧩 Chat overview tab crashes caused by undefined model references were resolved by adding proper null checks when accessing deleted or ejected models. [#19935](https://github.com/open-webui/open-webui/pull/19935)
- 🔄 MultiResponseMessages component crashes when navigating chat history after removing or changing selected models are now prevented through proper component re-initialization. [Commit](https://github.com/open-webui/open-webui/commit/870e29e3738da968c396b70532f365a3c2f71995), [#18599](https://github.com/open-webui/open-webui/issues/18599)
- 🚫 Channel API endpoint access is now correctly blocked when channels are globally disabled, preventing users with channel permissions from accessing channel data via API requests when the feature is turned off in admin settings. [#19957](https://github.com/open-webui/open-webui/pull/19957), [#19914](https://github.com/open-webui/open-webui/issues/19914)
- 👤 User list popup display in the admin panel was fixed to correctly track user identity when sorting or filtering changes the list order, preventing popups from showing incorrect user information. [Commit](https://github.com/open-webui/open-webui/commit/ae47101dc6aef2c7d8ae0d843985341fff820057), [#20046](https://github.com/open-webui/open-webui/issues/20046)
- 👥 User selection in the "Edit User Group" modal now preserves pagination position, allowing administrators to select multiple users across pages without resetting to page 1. [#19959](https://github.com/open-webui/open-webui/pull/19959)
- 📸 Model avatar images now update immediately in the admin models list through proper Cache-Control headers, eliminating the need for manual cache clearing. [#19959](https://github.com/open-webui/open-webui/pull/19959)
- 🔒 Temporary chat permission enforcement now correctly prevents users from enabling the feature through personal settings when disabled in default or group permissions. [#19785](https://github.com/open-webui/open-webui/issues/19785)
- 🎨 Image editing with reference images now correctly uses both previously generated images and newly uploaded reference images. [Commit](https://github.com/open-webui/open-webui/commit/bcd50ed8f1b7387fd700538ae0d74fc72f3c53d0)
- 🧠 Image generation and editing operations are now explicitly injected into system context, improving LLM comprehension even for weaker models so they reliably acknowledge operations instead of incorrectly claiming they cannot generate images. [Commit](https://github.com/open-webui/open-webui/commit/28b2fcab0cd036dbe646a66fe81890f288c77121)
- 📑 Source citation rendering errors when citation syntax appeared in user messages or contexts without source data were resolved. [Commit](https://github.com/open-webui/open-webui/commit/3c8f1cf8e58d52e86375634b0381374298b1b4f3)
- 📄 DOCX file parsing now works correctly in temporary chats through client-side text extraction, preventing raw data from being displayed. [Commit](https://github.com/open-webui/open-webui/commit/6993b0b40b10af8cdbe6626702cc94080fff9e22)
- 🔧 Pipeline settings save failures when valve properties contain null values are now handled correctly. [#19791](https://github.com/open-webui/open-webui/pull/19791)
- ⚙️ Model usage settings are now correctly preserved when switching between models instead of being unexpectedly cleared or reset. [#19868](https://github.com/open-webui/open-webui/pull/19868), [#19549](https://github.com/open-webui/open-webui/issues/19549)
- 🛡️ Invalid PASSWORD_VALIDATION_REGEX_PATTERN configurations no longer cause startup warnings, with automatic fallback to the default pattern when regex compilation fails. [#20058](https://github.com/open-webui/open-webui/pull/20058)
- 🎯 The DefaultFiltersSelector component in model settings now correctly displays when only global toggleable filters are present, enabling per-model default configuration. [#20066](https://github.com/open-webui/open-webui/pull/20066)
- 🎤 Audio file upload failures caused by MIME type matching issues with spacing variations and codec parameters were resolved by implementing proper MIME type parsing. [#17771](https://github.com/open-webui/open-webui/pull/17771), [#17761](https://github.com/open-webui/open-webui/issues/17761)
- ⌨️ Regenerate response keyboard shortcut now only activates when chat input is selected, preventing unintended regeneration when modals are open or other UI elements are focused. [#19875](https://github.com/open-webui/open-webui/pull/19875)
- 📋 Log truncation issues in Docker deployments during application crashes were resolved by disabling Python stdio buffering, ensuring complete diagnostic output is captured. [#19844](https://github.com/open-webui/open-webui/issues/19844)
- 🔴 Redis cluster compatibility issues with disabled KEYS command were resolved by replacing blocking KEYS operations with production-safe SCAN iterations. [#19871](https://github.com/open-webui/open-webui/pull/19871), [#15834](https://github.com/open-webui/open-webui/issues/15834)
- 🔤 File attachment container layout issues when using RTL languages were resolved by applying chat direction settings to file containers across all message types. [#19891](https://github.com/open-webui/open-webui/pull/19891), [#19742](https://github.com/open-webui/open-webui/issues/19742)
- 🔃 Ollama model list now automatically refreshes after model deletion, preventing deleted models from persisting in the UI and being inadvertently re-downloaded during subsequent pull operations. [#19912](https://github.com/open-webui/open-webui/pull/19912)
- 🌐 Ollama Cloud web search now correctly applies domain filtering to search results. [Commit](https://github.com/open-webui/open-webui/commit/d4bd938a77c22409a1643c058b937a06e07baca9)
- 📜 Tool specification serialization now preserves non-ASCII characters including Chinese text, improving LLM comprehension and tool selection accuracy by avoiding Unicode escape sequences. [#19942](https://github.com/open-webui/open-webui/pull/19942)
- 🛟 Model editor stability was improved with null safety checks for tools, functions, and file input operations, preventing crashes when stores are undefined or file objects are invalid. [#19939](https://github.com/open-webui/open-webui/pull/19939)
- 🗣️ MoA completion handling stability was improved with null safety checks for response objects, boolean casting for settings, and proper timeout type definitions. [#19921](https://github.com/open-webui/open-webui/pull/19921)
- 🎛️ Chat functionality failures caused by empty logit_bias parameter values are now prevented by properly handling empty strings in the parameter parsing middleware. [#19982](https://github.com/open-webui/open-webui/issues/19982)
- 🔏 Administrators can now delete read-only knowledge bases from deleted users, resolving permission issues that previously prevented cleanup of orphaned read-only content. [Commit](https://github.com/open-webui/open-webui/commit/59d6eb2badf46f9c2b1e879484ac33432915b575)
- 💾 Cloned prompts and tools now correctly preserve their access control settings instead of being reset to null, preventing unintended visibility changes when duplicating private or restricted items. [#19960](https://github.com/open-webui/open-webui/pull/19960), [#19360](https://github.com/open-webui/open-webui/issues/19360)
- 🎚️ Text scale adjustment buttons in Interface Settings were fixed to correctly increment and decrement the scale value. [#19699](https://github.com/open-webui/open-webui/pull/19699)
- 🎭 Group channel invite button text visibility in light theme was corrected to display properly against dark backgrounds. [#19828](https://github.com/open-webui/open-webui/issues/19828)
- 📁 The move button is now hidden when no folders exist, preventing display of non-functional controls. [#19705](https://github.com/open-webui/open-webui/pull/19705)
- 📦 Qdrant client dependency was updated to resolve startup version incompatibility warnings. [#19757](https://github.com/open-webui/open-webui/pull/19757)
- 🧮 The "ENABLE_ASYNC_EMBEDDING" environment variable is now correctly applied to embedding operations when configured exclusively via environment variables. [#19748](https://github.com/open-webui/open-webui/pull/19748)
- 🌄 The "COMFYUI_WORKFLOW_NODES" and "IMAGES_EDIT_COMFYUI_WORKFLOW_NODES" environment variables are now correctly loaded and parsed as JSON lists, and the configuration key name was corrected from "COMFYUI_WORKFLOW" to "COMFYUI_WORKFLOW_NODES". [#19918](https://github.com/open-webui/open-webui/pull/19918), [#19886](https://github.com/open-webui/open-webui/issues/19886)
- 💫 Channel name length is now limited to 128 characters with validation to prevent display issues caused by excessively long names. [Commit](https://github.com/open-webui/open-webui/commit/f509f5542dde384d34402f6df763f49a06bea109)
- 🔎 Bocha search with filter list functionality now works correctly by returning results as a list instead of a dictionary wrapper, ensuring compatibility with result filtering operations. [Commit](https://github.com/open-webui/open-webui/commit/b5bd8704fe1672da839bb3be6210d7cb494797ce), [#19733](https://github.com/open-webui/open-webui/issues/19733)
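The semaphore-based concurrency control mentioned in the Brave Search fix above follows a common asyncio pattern. The sketch below is illustrative only, with hypothetical function names rather than Open WebUI's actual code; it shows how a single limit (1 = sequential, 0 = unlimited) can gate concurrent search-engine calls, matching the behavior of the WEB_SEARCH_CONCURRENT_REQUESTS setting described in the Changed section below.

```python
# Illustrative sketch of asyncio.Semaphore-based concurrency control for
# search queries. Names and structure are assumptions, not the project's
# actual implementation.
import asyncio

WEB_SEARCH_CONCURRENT_REQUESTS = 1  # 1 = sequential (rate-limited APIs), 0 = unlimited

async def run_queries(queries: list[str]) -> list[str]:
    limit = WEB_SEARCH_CONCURRENT_REQUESTS
    semaphore = asyncio.Semaphore(limit) if limit > 0 else None

    async def search(query: str) -> str:
        async def engine_call() -> str:
            # Placeholder for a real engine call (Brave, SearXNG, ...).
            await asyncio.sleep(0.1)
            return f"results for {query!r}"
        if semaphore is None:
            return await engine_call()  # unlimited parallelism
        async with semaphore:           # at most `limit` calls in flight
            return await engine_call()

    return await asyncio.gather(*(search(q) for q in queries))

if __name__ == "__main__":
    print(asyncio.run(run_queries(["a", "b", "c"])))
```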

### Changed

- ⚠️ This release includes database schema changes; multi-worker, multi-server, or load-balanced deployments must update all instances simultaneously rather than performing rolling updates, as running mixed versions will cause application failures due to schema incompatibility between old and new instances.
- 📡 WEB_SEARCH_CONCURRENT_REQUESTS default changed from 10 to 0 (unlimited) — This setting now applies to all search engines instead of only DuckDuckGo; previously users were implicitly limited to 10 concurrent queries, but now have unlimited parallel requests by default; set to 1 for sequential execution if using rate-limited APIs like Brave free tier. [#20070](https://github.com/open-webui/open-webui/pull/20070)
- 💾 SQLCipher absolute path handling was fixed to properly support absolute database paths (e.g., "/app/data.db") instead of incorrectly stripping leading slashes and converting them to relative paths; this restores functionality for Docker volume mounts and explicit absolute path configurations while maintaining backward compatibility with relative paths. [#20074](https://github.com/open-webui/open-webui/pull/20074)
- 🔌 Knowledge base file listing API was redesigned with paginated responses and new filtering parameters; the GET /knowledge/{id}/files endpoint now returns paginated results with user attribution instead of embedding all files in the knowledge object, which may require updates to custom integrations or scripts accessing knowledge base data programmatically. [Commit](https://github.com/open-webui/open-webui/commit/94a8439105f30203ea9d729787c9c5978f5c22a2)
- 🗑️ Legacy knowledge base support for deprecated document collections and tag-based collections was removed; users with pre-knowledge base documents must migrate to the current knowledge base system as legacy items will no longer appear in selectors or command menus. [Commit](https://github.com/open-webui/open-webui/commit/a934dc997ed67a036dd7975e380f8036c447d3ed)
- 🔨 Source-level log environment variables (AUDIO_LOG_LEVEL, CONFIG_LOG_LEVEL, MODELS_LOG_LEVEL, etc.) were removed as they provided limited configuration options and added significant complexity across 100+ files; the GLOBAL_LOG_LEVEL environment variable, which already took precedence over source-level settings, now serves as the exclusive logging configuration method. [#20045](https://github.com/open-webui/open-webui/pull/20045)
- 🐍 LangChain was upgraded to version 1.2.0, representing a major dependency update and significant progress toward Python 3.13 compatibility while improving RAG pipeline functionality for document loading and retrieval operations. [#19991](https://github.com/open-webui/open-webui/pull/19991)

## [0.6.41] - 2025-12-02

### Added

- 🚦 Sign-in rate limiting was implemented to protect against brute force attacks, limiting login attempts to 15 per 3-minute window per email address using Redis with automatic fallback to in-memory storage when Redis is unavailable. [Commit](https://github.com/open-webui/open-webui/commit/7b166370432414ce8f186747fb098e0c70fb2d6b) (an in-memory sketch of the windowing logic follows this list)
- 📂 Administrators can now globally disable the folders feature and control user-level folder permissions through the admin panel, enabling minimalist interface configurations for deployments that don't require workspace organization features. [#19529](https://github.com/open-webui/open-webui/pull/19529), [#19210](https://github.com/open-webui/open-webui/discussions/19210), [#18459](https://github.com/open-webui/open-webui/discussions/18459), [#18299](https://github.com/open-webui/open-webui/discussions/18299)
- 👥 Group channels were introduced as a new channel type enabling membership-based collaboration spaces where users explicitly join as members rather than accessing through permissions, with support for public or private visibility, automatic member inclusion from specified user groups, member role tracking with invitation metadata, and post-creation member management allowing channel managers to add or remove members through the channel info modal. [Commit](https://github.com/open-webui/open-webui/commit/f589b7c1895a6a77166c047891acfa21bc0936c4), [Commit](https://github.com/open-webui/open-webui/commit/3f1d9ccbf8443a2fa5278f36202bad930a216680)
- 💬 Direct Message channels were introduced with a dedicated channel type selector and multi-user member selection interface, enabling private conversations between specific users without requiring full channel visibility. [Commit](https://github.com/open-webui/open-webui/commit/64b4d5d9c280b926746584aaf92b447d09deb386)
- 📨 Direct Message channels now support a complete user-to-user messaging system with member-based access control, automatic deduplication for one-on-one conversations, optional channel naming, and distinct visual presentation using participant avatars instead of channel icons. [Commit](https://github.com/open-webui/open-webui/commit/acccb9afdd557274d6296c70258bb897bbb6652f)
- 🙈 Users can now hide Direct Message channels from their sidebar while preserving message history, with automatic reactivation when new messages arrive from other participants, providing a cleaner interface for managing active conversations. [Commit](https://github.com/open-webui/open-webui/commit/acccb9afdd557274d6296c70258bb897bbb6652f)
- ☑️ A comprehensive user selection component was added to the channel creation modal, featuring search functionality, sortable user lists, pagination support, and multi-select checkboxes for building Direct Message participant lists. [Commit](https://github.com/open-webui/open-webui/commit/acccb9afdd557274d6296c70258bb897bbb6652f)
- 🔴 Channel unread message count tracking was implemented with visual badge indicators in the sidebar, automatically updating counts in real-time and marking messages as read when users view channels, with join/leave functionality to manage membership status. [Commit](https://github.com/open-webui/open-webui/commit/64b4d5d9c280b926746584aaf92b447d09deb386)
- 📌 Message pinning functionality was added to channels, allowing users to pin important messages for easy reference with visual highlighting, a dedicated pinned messages modal accessible from the navbar, and complete backend support for tracking pinned status, pin timestamp, and the user who pinned each message. [Commit](https://github.com/open-webui/open-webui/commit/64b4d5d9c280b926746584aaf92b447d09deb386), [Commit](https://github.com/open-webui/open-webui/commit/aae2fce17355419d9c29f8100409108037895201)
- 🟢 Direct Message channels now display an active status indicator for one-on-one conversations, showing a green dot when the other participant is currently online or a gray dot when offline. [Commit](https://github.com/open-webui/open-webui/commit/4b6773885cd7527c5a56b963781dac5e95105eec), [Commit](https://github.com/open-webui/open-webui/commit/39645102d14f34e71b34e5ddce0625790be33f6f)
- 🆔 Users can now start Direct Message conversations directly from user profile previews by clicking the "Message" button, enabling quick access to private messaging without navigating away from the current channel. [Commit](https://github.com/open-webui/open-webui/commit/a0826ec9fedb56320532616d568fa59dda831d4e)
- ⚡ Channel messages now appear instantly when sent using optimistic UI rendering, displaying with a pending state while the server confirms delivery, providing a more responsive messaging experience. [Commit](https://github.com/open-webui/open-webui/commit/25994dd3da90600401f53596d4e4fb067c1b8eaa)
- 👍 Channel message reactions now display the names of users who reacted when hovering over the emoji, showing up to three names with a count for additional reactors. [Commit](https://github.com/open-webui/open-webui/commit/05e79bdd0c7af70b631e958924e3656db1013b80)
- 🛠️ Channel creators can now edit and delete their own group and DM channels without requiring administrator privileges, enabling users to manage the channels they create independently. [Commit](https://github.com/open-webui/open-webui/commit/f589b7c1895a6a77166c047891acfa21bc0936c4)
- 🔌 A new API endpoint was added to directly get or create a Direct Message channel with a specific user by their ID, streamlining programmatic DM channel creation for integrations and frontend workflows. [Commit](https://github.com/open-webui/open-webui/commit/f589b7c1895a6a77166c047891acfa21bc0936c4)
- 💭 Users can now set a custom status with an emoji and message that displays in profile previews, the sidebar user menu, and Direct Message channel items in the sidebar, with the ability to clear status at any time, providing visibility into availability or current focus similar to team communication platforms. [Commit](https://github.com/open-webui/open-webui/commit/51621ba91a982e52da168ce823abffd11ad3e4fa), [Commit](https://github.com/open-webui/open-webui/commit/f5e8d4d5a004115489c35725408b057e24dfe318)
- 📤 A group export API endpoint was added, enabling administrators to export complete group data including member lists for backup and migration purposes. [Commit](https://github.com/open-webui/open-webui/commit/09b6ea38c579659f8ca43ae5ea3746df3ac561ad)
- 📡 A new API endpoint was added to retrieve all users belonging to a specific group, enabling programmatic access to group membership information for administrative workflows. [Commit](https://github.com/open-webui/open-webui/commit/01868e856a10f474f74fbd1b4425dafdf949222f)
- 👁️ The admin user list now displays an active status indicator next to each user, showing a visual green dot for users who have been active within the last three minutes. [Commit](https://github.com/open-webui/open-webui/commit/1b095d12ff2465b83afa94af89ded9593f8a8655)
- 🔑 The admin user edit modal now displays OAuth identity information with a per-provider breakdown, showing each linked identity provider and its associated subject identifier separately. [#19573](https://github.com/open-webui/open-webui/pull/19573)
- 🧩 OAuth role claim parsing now respects the "OAUTH_ROLES_SEPARATOR" configuration, enabling proper parsing of roles returned as comma-separated strings and providing consistent behavior with group claim handling. [#19514](https://github.com/open-webui/open-webui/pull/19514)
- 🎛️ Channel feature access can now be controlled through both the "USER_PERMISSIONS_FEATURES_CHANNELS" environment variable and group permission toggles in the admin panel, allowing administrators to restrict channel functionality for specific users or groups while defaulting to enabled for all users. [Commit](https://github.com/open-webui/open-webui/commit/f589b7c1895a6a77166c047891acfa21bc0936c4)
- 🎨 The model editor interface was refined with access control settings moved to a dedicated modal, group member counts now displayed when configuring permissions, reorganized layout with improved visual hierarchy, and redesigned prompt suggestions cards with tooltips for field guidance. [Commit](https://github.com/open-webui/open-webui/commit/e65d92fc6f49da5ca059e1c65a729e7973354b99), [Commit](https://github.com/open-webui/open-webui/commit/9d39b9b42c653ee2acf2674b2df343ecbceb4954)
- 🏗️ Knowledge base file management was rebuilt with a dedicated database table replacing the previous JSON array storage, enabling pagination support for large knowledge bases, significantly faster file listing performance, and more reliable file-knowledge base relationship tracking. [Commit](https://github.com/open-webui/open-webui/commit/d19023288e2ca40f86e2dc3fd9f230540f3e70d7)
- ☁️ Azure Document Intelligence model selection was added, allowing administrators to specify which model to use for document processing via the "DOCUMENT_INTELLIGENCE_MODEL" environment variable or admin UI setting, with "prebuilt-layout" as the default. [#19692](https://github.com/open-webui/open-webui/pull/19692), [Docs:#872](https://github.com/open-webui/docs/pull/872)
- 🚀 Milvus multitenancy vector database performance was improved by removing manual flush calls after upsert operations, eliminating rate limit errors and reducing load on etcd and MinIO/S3 storage by allowing Milvus to manage segment persistence automatically via its WAL and auto-flush policies. [#19680](https://github.com/open-webui/open-webui/pull/19680)
- ✨ Various improvements were implemented across the frontend and backend to enhance performance, stability, and security.
- 🌍 Translations for German, French, Portuguese (Brazil), Catalan, Simplified Chinese, and Traditional Chinese were enhanced and expanded.
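The sign-in rate limiting entry above describes a fixed window of 15 attempts per 3 minutes per email address. The following is a minimal in-memory sketch of that windowing logic, assuming a simple fixed-window counter; it is not Open WebUI's actual implementation, which backs the counter with Redis and falls back to memory only when Redis is unavailable.

```python
# In-memory sketch of fixed-window sign-in rate limiting: 15 attempts per
# 3-minute window per email (limits taken from the release notes above).
# The data structure and reset behavior are illustrative assumptions.
import time

WINDOW_SECONDS = 180   # 3-minute window
MAX_ATTEMPTS = 15      # attempts allowed per window

_attempts: dict[str, tuple[int, float]] = {}  # email -> (count, window_start)

def allow_signin(email: str, now: float | None = None) -> bool:
    """Return True if a sign-in attempt for this email is allowed."""
    now = time.time() if now is None else now
    count, start = _attempts.get(email, (0, now))
    if now - start >= WINDOW_SECONDS:   # window expired: start a new one
        count, start = 0, now
    if count >= MAX_ATTEMPTS:
        return False                    # over the limit for this window
    _attempts[email] = (count + 1, start)
    return True

if __name__ == "__main__":
    results = [allow_signin("user@example.com") for _ in range(16)]
    print(results.count(True), "allowed,", results.count(False), "blocked")
```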

### Fixed

- 🔄 Tool call response token duplication was fixed by removing redundant message history additions in non-native function calling mode, resolving an issue where tool results were included twice in the context and causing 2x token consumption. [#19656](https://github.com/open-webui/open-webui/issues/19656), [Commit](https://github.com/open-webui/open-webui/commit/52ccab8)
- 🛡️ Web search domain filtering was corrected to properly block results when any resolved hostname or IP address matches a blocked domain, preventing blocked sites from appearing in search results due to permissive hostname resolution logic that previously allowed results through if any single resolved address passed the filter. [#19670](https://github.com/open-webui/open-webui/pull/19670), [#19669](https://github.com/open-webui/open-webui/issues/19669)
- 🧠 Custom models based on Ollama or OpenAI now properly inherit the connection type from their base model, ensuring they appear correctly in the "Local" or "External" model selection tabs instead of only appearing under "All". [#19183](https://github.com/open-webui/open-webui/issues/19183), [Commit](https://github.com/open-webui/open-webui/commit/39f7575)
- 🐍 SentenceTransformers embedding initialization was fixed by updating the transformers dependency to version 4.57.3, resolving a regression in v0.6.40 where document ingestion failed with "'NoneType' object has no attribute 'encode'" errors due to a bug in transformers 4.57.2. [#19512](https://github.com/open-webui/open-webui/issues/19512), [#19513](https://github.com/open-webui/open-webui/pull/19513)
- 📈 Active user count accuracy was significantly improved by replacing the socket-based USER_POOL tracking with a database-backed heartbeat mechanism, resolving long-standing issues where Redis deployments displayed inflated user counts due to stale sessions never being cleaned up on disconnect. [#16074](https://github.com/open-webui/open-webui/discussions/16074), [Commit](https://github.com/open-webui/open-webui/commit/70948f8803e417459d5203839f8077fdbfbbb213)
- 👥 Default group assignment now applies consistently across all user registration methods including OAuth/SSO, LDAP, and admin-created users, fixing an issue where the "DEFAULT_GROUP_ID" setting was only being applied to users who signed up via the email/password signup form. [#19685](https://github.com/open-webui/open-webui/pull/19685)
- 🔦 Model list filtering in workspaces was corrected to properly include models shared with user groups, ensuring members can view models they have write access to through group permissions. [#19461](https://github.com/open-webui/open-webui/issues/19461), [Commit](https://github.com/open-webui/open-webui/commit/69722ba973768a5f689f2e2351bf583a8db9bba8)
- 🖼️ User profile image display in preview contexts was fixed by resolving a Pydantic validation error that prevented proper rendering. [Commit](https://github.com/open-webui/open-webui/commit/c7eb7136893b0ddfdc5d55ffc7a05bd84a00f5d6)
- 🔒 Redis TLS connection failures were resolved by updating the python-socketio dependency to version 5.15.0, restoring support for the "rediss://" URL schema. [#19480](https://github.com/open-webui/open-webui/issues/19480), [#19488](https://github.com/open-webui/open-webui/pull/19488)
- 📝 MCP tool server configuration was corrected to properly handle the "Function Name Filter List" as both string and list types, preventing AttributeError when the field is empty and ensuring backward compatibility. [#19486](https://github.com/open-webui/open-webui/issues/19486), [Commit](https://github.com/open-webui/open-webui/commit/c5b73d71843edc024325d4a6e625ec939a747279), [Commit](https://github.com/open-webui/open-webui/commit/477097c2e42985c14892301d0127314629d07df1)
- 📎 Web page attachment failures causing TypeError on metadata checks were resolved by correcting async threadpool parameter passing in vector database operations. [#19493](https://github.com/open-webui/open-webui/issues/19493), [Commit](https://github.com/open-webui/open-webui/commit/4370dee79e19d77062c03fba81780cb3b779fca3)
- 💾 Model allowlist persistence in multi-worker deployments was fixed by implementing Redis-based shared state for the internal models dictionary, ensuring configuration changes are consistently visible across all worker processes. [#19395](https://github.com/open-webui/open-webui/issues/19395), [Commit](https://github.com/open-webui/open-webui/commit/b5e5617d7f7ad3e4eec9f15f4cc7f07cb5afc2fa)
- ⏳ Chat history infinite loading was prevented by enhancing message data structure to properly track parent message relationships, resolving issues where missing parentId fields caused perpetual loading states. [#19225](https://github.com/open-webui/open-webui/issues/19225), [Commit](https://github.com/open-webui/open-webui/commit/ff4b1b9862d15adfa15eac17d2ce066c3d8ae38f)
- 🩹 Database migration robustness was improved by automatically detecting and correcting missing primary key constraints on the user table, ensuring successful schema upgrades for databases with non-standard configurations. [#19487](https://github.com/open-webui/open-webui/discussions/19487), [Commit](https://github.com/open-webui/open-webui/commit/453ea9b9a167c0b03d86c46e6efd086bf10056ce)
- 🏷️ OAuth group assignment now updates correctly on first login when users transition from admin to user role, ensuring group memberships reflect immediately when group management is enabled. [#19475](https://github.com/open-webui/open-webui/issues/19475), [#19476](https://github.com/open-webui/open-webui/pull/19476)
- 💡 Knowledge base file tooltips now properly display the parent collection name when referencing files with the hash symbol, preventing confusion between identically-named files in different collections. [#19491](https://github.com/open-webui/open-webui/issues/19491), [Commit](https://github.com/open-webui/open-webui/commit/3fe5a47b0ff84ac97f8e4ff56a19fa2ec065bf66)
- 🔐 Knowledge base file access inconsistencies were resolved where authorized non-admin users received "Not found" or permission errors for certain files due to race conditions during upload causing mismatched collection_name values, with file access validation now properly checking against knowledge base file associations. [#18689](https://github.com/open-webui/open-webui/issues/18689), [#19523](https://github.com/open-webui/open-webui/pull/19523), [Commit](https://github.com/open-webui/open-webui/commit/e301d1962e45900ababd3eabb7e9a2ad275a5761)
- 📦 Knowledge API batch file addition endpoint was corrected to properly handle async operations, resolving 500 Internal Server Error responses when adding multiple files simultaneously. [#19538](https://github.com/open-webui/open-webui/issues/19538), [Commit](https://github.com/open-webui/open-webui/commit/28659f60d94feb4f6a99bb1a5b54d7f45e5ea10f)
- 🤖 Embedding model auto-update functionality was fixed to properly respect the "RAG_EMBEDDING_MODEL_AUTO_UPDATE" setting by correctly passing the flag to the model path resolver, ensuring models update as expected when the auto-update option is enabled. [#19687](https://github.com/open-webui/open-webui/pull/19687)
- 📉 API response payload sizes were dramatically reduced by removing base64-encoded profile images from most endpoints, eliminating multi-megabyte responses caused by high-resolution avatars and enabling better browser caching. [#19519](https://github.com/open-webui/open-webui/issues/19519), [Commit](https://github.com/open-webui/open-webui/commit/384753c4c17f62a68d38af4bbcf55a21ee08e0f2)
- 📞 Redundant API calls on the admin user overview page were eliminated by consolidating reactive statements, reducing four duplicate requests to a single efficient call and significantly improving page load performance. [#19509](https://github.com/open-webui/open-webui/issues/19509), [Commit](https://github.com/open-webui/open-webui/commit/9f89cc5e9f7e1c6c9e2bc91177e08df7c79f66f9)
- 🧹 Duplicate API calls on the workspace models page were eliminated by removing redundant model list fetching, reducing two identical requests to a single call and improving page responsiveness. [#19517](https://github.com/open-webui/open-webui/issues/19517), [Commit](https://github.com/open-webui/open-webui/commit/d1bbf6be7a4d1d53fa8ad46ca4f62fc4b2e6a8cb)
|
||||
- 🔘 The model valves button was corrected to prevent unintended form submission by adding explicit button type attribute, ensuring it no longer triggers message sending when the input area contains text. [#19534](https://github.com/open-webui/open-webui/pull/19534)
|
||||
- 🗑️ Ollama model deletion was fixed by correcting the request payload format and ensuring the model selector properly displays the placeholder option. [Commit](https://github.com/open-webui/open-webui/commit/0f3156651c64bc5af188a65fc2908bdcecf30c74)
|
||||
- 🎨 Image generation in temporary chats was fixed by correctly handling local chat sessions that are not persisted to the database. [Commit](https://github.com/open-webui/open-webui/commit/a7c7993bbf3a21cb7ba416525b89233cf2ad877f)
|
||||
- 🕵️♂️ Audit logging was fixed by correctly awaiting the async user authentication call, resolving failures where coroutine objects were passed instead of user data. [#19658](https://github.com/open-webui/open-webui/pull/19658), [Commit](https://github.com/open-webui/open-webui/commit/dba86bc)
|
||||
- 🌙 Dark mode select dropdown styling was corrected to use proper background colors, fixing an issue where dropdown borders and hover states appeared white instead of matching the dark theme. [#19693](https://github.com/open-webui/open-webui/pull/19693), [#19442](https://github.com/open-webui/open-webui/issues/19442)
|
||||
- 🔍 Milvus vector database query filtering was fixed by correcting string quote handling in filter expressions and using the proper parameter name for queries, resolving false "duplicate content detected" errors that prevented uploading multiple files to knowledge bases. [#19602](https://github.com/open-webui/open-webui/pull/19602), [#18119](https://github.com/open-webui/open-webui/issues/18119), [#16345](https://github.com/open-webui/open-webui/issues/16345), [#17088](https://github.com/open-webui/open-webui/issues/17088), [#18485](https://github.com/open-webui/open-webui/issues/18485)
|
||||
- 🆙 Milvus multitenancy vector database was updated to use query_iterator() for improved robustness and consistency with the standard Milvus implementation, fixing the same false duplicate detection errors and improving handling of large result sets in multi-tenant deployments. [#19695](https://github.com/open-webui/open-webui/pull/19695)
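
To illustrate the parentId fix above: the chat view reconstructs a thread by walking from a leaf message back toward the root, so an entry without a usable parentId can leave the walk unable to terminate. A minimal, self-contained sketch of the idea; the `history` shape and `build_thread` helper are illustrative, not Open WebUI's actual internals:

```python
# Illustrative message map: each message records its parent, with an
# explicit None marking the root of the thread.
history = {
    "m1": {"id": "m1", "parentId": None, "content": "Hi"},
    "m2": {"id": "m2", "parentId": "m1", "content": "Hello! How can I help?"},
}


def build_thread(history: dict, leaf_id: str) -> list:
    """Walk parentId links from a leaf back to the root."""
    thread, current = [], history.get(leaf_id)
    while current is not None:
        thread.append(current)
        parent_id = current.get("parentId")  # an explicit None ends the walk
        current = history.get(parent_id) if parent_id else None
    return list(reversed(thread))


print([m["content"] for m in build_thread(history, "m2")])
# ['Hi', 'Hello! How can I help?']
```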
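
The audit-logging fix above is the classic missing-await bug: an async authentication helper returns a coroutine, and forwarding it without awaiting hands downstream code a coroutine object instead of the user record. A hedged sketch of the failure mode and the fix (function names are illustrative, not the actual Open WebUI call sites):

```python
import asyncio


async def get_current_user(token: str) -> dict:
    # Stand-in for an async user-authentication lookup.
    return {"id": "u1", "name": "alice"}


async def audit_request_broken(token: str) -> str:
    user = get_current_user(token)  # bug: coroutine object, never awaited
    return f"audit entry for {user}"  # would log '<coroutine ...>' instead of a user


async def audit_request_fixed(token: str) -> str:
    user = await get_current_user(token)  # the fix: await the async call
    return f"audit entry for {user['name']}"


print(asyncio.run(audit_request_fixed("token")))  # audit entry for alice
```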
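
The Milvus fix above comes down to quoting: interpolating a value that itself contains a quote into a filter expression silently terminates the string literal early, so the filter matches unrelated rows and uploads are flagged as duplicates. A hedged sketch of the escaping idea only; the `metadata["hash"]` field name is illustrative rather than the exact expression Open WebUI builds:

```python
def build_hash_filter(file_hash: str) -> str:
    """Build a Milvus-style filter expression with the value safely quoted."""
    # Escape backslashes first, then double quotes, so a value such as
    # 'abc"123' cannot close the string literal and widen the match.
    escaped = file_hash.replace("\\", "\\\\").replace('"', '\\"')
    return f'metadata["hash"] == "{escaped}"'


print(build_hash_filter('abc"123'))  # metadata["hash"] == "abc\"123"
```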
### Changed
- ⚠️ **IMPORTANT for Multi-Instance Deployments**: This release includes database schema changes. Multi-worker, multi-server, or load-balanced deployments must update all instances simultaneously rather than performing rolling updates, as running mixed versions will cause application failures due to schema incompatibility between old and new instances.

- 👮 Channel creation is now restricted to administrators, with the channel add button hidden for regular users to maintain organizational control over communication channels. [Commit](https://github.com/open-webui/open-webui/commit/421aba7cd7cd708168b1f2565026c74525a67905)

- ➖ The active user count indicator was removed from the bottom-left user menu in the sidebar to streamline the interface. [Commit](https://github.com/open-webui/open-webui/commit/848f3fd4d86ca66656e0ff0335773945af8d7d8d)

- 🗂️ The user table was restructured: API keys were migrated to a dedicated table supporting future multi-key functionality, OAuth data storage was converted to a JSON structure enabling multiple identity providers per user account (sketched after this list), and the internal column types for the "info" and "settings" fields were optimized from TEXT to JSON, with an automatic migration preserving all existing data and associations. [#19573](https://github.com/open-webui/open-webui/pull/19573)

- 🔄 The knowledge base API was restructured to support the new file relationship model.
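
To make the user-table change above concrete: the old schema stored a single `oauth_sub` string per user, while the new `oauth` JSON column is keyed by provider, which is what allows several identity providers per account. The accompanying migration (shown further down in this compare) converts values along these lines:

```python
import json


def migrate_oauth_sub(oauth_sub: str) -> str:
    """Convert the legacy oauth_sub string to the new JSON structure."""
    # "google@12345"  -> {"google": {"sub": "12345"}}
    # "12345" (plain) -> {"oidc": {"sub": "12345"}}
    if "@" in oauth_sub:
        provider, sub = oauth_sub.split("@", 1)
    else:
        provider, sub = "oidc", oauth_sub
    return json.dumps({provider: {"sub": sub}})


print(migrate_oauth_sub("google@12345"))  # {"google": {"sub": "12345"}}
```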
## [0.6.40] - 2025-11-25

### Fixed
- 🗄️ A critical PostgreSQL user listing performance issue was resolved by removing a redundant count operation that caused severe database slowdowns and potential timeouts when viewing user lists in admin panels (the general pattern is sketched below).
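
The changelog does not spell out the exact query, so the following is only a hedged SQLAlchemy sketch of the general pattern behind this class of slowdown: issuing a separate COUNT on every listing request in addition to fetching the page itself (model and column names are illustrative):

```python
from sqlalchemy import Column, Integer, Text, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class User(Base):
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    name = Column(Text)


engine = create_engine("sqlite://")  # in-memory stand-in for the real database
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add_all(User(name=f"u{i}") for i in range(200))
    session.commit()

    # Redundant pattern: a full COUNT(*) issued on every page view,
    # on top of the page fetch itself.
    total = session.query(User).count()
    page = session.query(User).offset(0).limit(50).all()

    # Leaner pattern: fetch only the page; compute or cache the total
    # once instead of recounting on each request.
    page = session.query(User).offset(0).limit(50).all()
    print(len(page), total)
```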
## [0.6.39] - 2025-11-25

### Added
@@ -55,6 +55,9 @@ ARG USE_RERANKING_MODEL
ARG UID
ARG GID

# Python settings
ENV PYTHONUNBUFFERED=1

## Basis ##
ENV ENV=prod \
    PORT=8080 \
@@ -1,35 +0,0 @@
### Installing Both Ollama and Open WebUI Using Kustomize

For cpu-only pod

```bash
kubectl apply -f ./kubernetes/manifest/base
```

For gpu-enabled pod

```bash
kubectl apply -k ./kubernetes/manifest
```

### Installing Both Ollama and Open WebUI Using Helm

Package Helm file first

```bash
helm package ./kubernetes/helm/
```

For cpu-only pod

```bash
helm install ollama-webui ./ollama-webui-*.tgz
```

For gpu-enabled pod

```bash
helm install ollama-webui ./ollama-webui-*.tgz --set ollama.resources.limits.nvidia.com/gpu="1"
```

Check the `kubernetes/helm/values.yaml` file to know which parameters are available for customization
LICENSE

@@ -1,4 +1,4 @@
Copyright (c) 2023-2025 Timothy Jaeryang Baek (Open WebUI)
Copyright (c) 2023- Open WebUI Inc. [Created by Timothy Jaeryang Baek]
All rights reserved.

Redistribution and use in source and binary forms, with or without
README.md

@@ -10,11 +10,13 @@
[](https://discord.gg/5rJgQTnV4s)
[](https://github.com/sponsors/tjbck)

**Open WebUI is an [extensible](https://docs.openwebui.com/features/plugin/), feature-rich, and user-friendly self-hosted AI platform designed to operate entirely offline.** It supports various LLM runners like **Ollama** and **OpenAI-compatible APIs**, with a **built-in inference engine** for RAG, making it a **powerful AI deployment solution**.

Passionate about open-source AI? [Join our team →](https://careers.openwebui.com/)

> [!TIP]
> **Looking for an [Enterprise Plan](https://docs.openwebui.com/enterprise)?** – **[Speak with Our Sales Team Today!](https://docs.openwebui.com/enterprise)**

@@ -188,14 +190,6 @@ docker run -d --network=host -v open-webui:/app/backend/data -e OLLAMA_BASE_URL=

### Keeping Your Docker Installation Up-to-Date

In case you want to update your local Docker installation to the latest version, you can do it with [Watchtower](https://containrrr.dev/watchtower/):

```bash
docker run --rm --volume /var/run/docker.sock:/var/run/docker.sock containrrr/watchtower --run-once open-webui
```

In the last part of the command, replace `open-webui` with your container name if it is different.

Check our Updating Guide available in our [Open WebUI Documentation](https://docs.openwebui.com/getting-started/updating).

### Using the Dev Branch 🌙
@@ -583,14 +583,16 @@ OAUTH_ROLES_CLAIM = PersistentConfig(
    os.environ.get("OAUTH_ROLES_CLAIM", "roles"),
)

SEP = os.environ.get("OAUTH_ROLES_SEPARATOR", ",")
OAUTH_ROLES_SEPARATOR = os.environ.get("OAUTH_ROLES_SEPARATOR", ",")

OAUTH_ALLOWED_ROLES = PersistentConfig(
    "OAUTH_ALLOWED_ROLES",
    "oauth.allowed_roles",
    [
        role.strip()
        for role in os.environ.get("OAUTH_ALLOWED_ROLES", f"user{SEP}admin").split(SEP)
        for role in os.environ.get(
            "OAUTH_ALLOWED_ROLES", f"user{OAUTH_ROLES_SEPARATOR}admin"
        ).split(OAUTH_ROLES_SEPARATOR)
        if role
    ],
)

@@ -600,7 +602,9 @@ OAUTH_ADMIN_ROLES = PersistentConfig(
    "oauth.admin_roles",
    [
        role.strip()
        for role in os.environ.get("OAUTH_ADMIN_ROLES", "admin").split(SEP)
        for role in os.environ.get("OAUTH_ADMIN_ROLES", "admin").split(
            OAUTH_ROLES_SEPARATOR
        )
        if role
    ],
)

@@ -625,6 +629,12 @@ OAUTH_ACCESS_TOKEN_REQUEST_INCLUDE_CLIENT_ID = (
    == "true"
)

OAUTH_AUDIENCE = PersistentConfig(
    "OAUTH_AUDIENCE",
    "oauth.audience",
    os.environ.get("OAUTH_AUDIENCE", ""),
)


def load_oauth_providers():
    OAUTH_PROVIDERS.clear()

@@ -1296,7 +1306,7 @@ USER_PERMISSIONS_WORKSPACE_MODELS_ALLOW_PUBLIC_SHARING = (

USER_PERMISSIONS_WORKSPACE_KNOWLEDGE_ALLOW_SHARING = (
    os.environ.get(
        "USER_PERMISSIONS_WORKSPACE_KNOWLEDGE_ALLOW_PUBLIC_SHARING", "False"
        "USER_PERMISSIONS_WORKSPACE_KNOWLEDGE_ALLOW_SHARING", "False"
    ).lower()
    == "true"
)

@@ -1335,8 +1345,7 @@ USER_PERMISSIONS_WORKSPACE_TOOLS_ALLOW_PUBLIC_SHARING = (

USER_PERMISSIONS_NOTES_ALLOW_SHARING = (
    os.environ.get("USER_PERMISSIONS_NOTES_ALLOW_PUBLIC_SHARING", "False").lower()
    == "true"
    os.environ.get("USER_PERMISSIONS_NOTES_ALLOW_SHARING", "False").lower() == "true"
)

USER_PERMISSIONS_NOTES_ALLOW_PUBLIC_SHARING = (

@@ -1443,10 +1452,18 @@ USER_PERMISSIONS_FEATURES_CODE_INTERPRETER = (
    == "true"
)

USER_PERMISSIONS_FEATURES_FOLDERS = (
    os.environ.get("USER_PERMISSIONS_FEATURES_FOLDERS", "True").lower() == "true"
)

USER_PERMISSIONS_FEATURES_NOTES = (
    os.environ.get("USER_PERMISSIONS_FEATURES_NOTES", "True").lower() == "true"
)

USER_PERMISSIONS_FEATURES_CHANNELS = (
    os.environ.get("USER_PERMISSIONS_FEATURES_CHANNELS", "True").lower() == "true"
)

USER_PERMISSIONS_FEATURES_API_KEYS = (
    os.environ.get("USER_PERMISSIONS_FEATURES_API_KEYS", "False").lower() == "true"
)

@@ -1499,12 +1516,16 @@ DEFAULT_USER_PERMISSIONS = {
        "temporary_enforced": USER_PERMISSIONS_CHAT_TEMPORARY_ENFORCED,
    },
    "features": {
        # General features
        "api_keys": USER_PERMISSIONS_FEATURES_API_KEYS,
        "notes": USER_PERMISSIONS_FEATURES_NOTES,
        "folders": USER_PERMISSIONS_FEATURES_FOLDERS,
        "channels": USER_PERMISSIONS_FEATURES_CHANNELS,
        "direct_tool_servers": USER_PERMISSIONS_FEATURES_DIRECT_TOOL_SERVERS,
        # Chat features
        "web_search": USER_PERMISSIONS_FEATURES_WEB_SEARCH,
        "image_generation": USER_PERMISSIONS_FEATURES_IMAGE_GENERATION,
        "code_interpreter": USER_PERMISSIONS_FEATURES_CODE_INTERPRETER,
        "notes": USER_PERMISSIONS_FEATURES_NOTES,
    },
}

@@ -1514,6 +1535,12 @@ USER_PERMISSIONS = PersistentConfig(
    DEFAULT_USER_PERMISSIONS,
)

ENABLE_FOLDERS = PersistentConfig(
    "ENABLE_FOLDERS",
    "folders.enable",
    os.environ.get("ENABLE_FOLDERS", "True").lower() == "true",
)

ENABLE_CHANNELS = PersistentConfig(
    "ENABLE_CHANNELS",
    "channels.enable",

@@ -2496,6 +2523,12 @@ MINERU_API_URL = PersistentConfig(
    os.environ.get("MINERU_API_URL", "http://localhost:8000"),
)

MINERU_API_TIMEOUT = PersistentConfig(
    "MINERU_API_TIMEOUT",
    "rag.mineru_api_timeout",
    os.environ.get("MINERU_API_TIMEOUT", "300"),
)

MINERU_API_KEY = PersistentConfig(
    "MINERU_API_KEY",
    "rag.mineru_api_key",

@@ -2568,6 +2601,12 @@ DOCUMENT_INTELLIGENCE_KEY = PersistentConfig(
    os.getenv("DOCUMENT_INTELLIGENCE_KEY", ""),
)

DOCUMENT_INTELLIGENCE_MODEL = PersistentConfig(
    "DOCUMENT_INTELLIGENCE_MODEL",
    "rag.document_intelligence_model",
    os.getenv("DOCUMENT_INTELLIGENCE_MODEL", "prebuilt-layout"),
)

MISTRAL_OCR_API_BASE_URL = PersistentConfig(
    "MISTRAL_OCR_API_BASE_URL",
    "rag.MISTRAL_OCR_API_BASE_URL",

@@ -2763,6 +2802,12 @@ RAG_EXTERNAL_RERANKER_API_KEY = PersistentConfig(
    os.environ.get("RAG_EXTERNAL_RERANKER_API_KEY", ""),
)

RAG_EXTERNAL_RERANKER_TIMEOUT = PersistentConfig(
    "RAG_EXTERNAL_RERANKER_TIMEOUT",
    "rag.external_reranker_timeout",
    os.environ.get("RAG_EXTERNAL_RERANKER_TIMEOUT", ""),
)


RAG_TEXT_SPLITTER = PersistentConfig(
    "RAG_TEXT_SPLITTER",

@@ -2949,7 +2994,7 @@ WEB_SEARCH_DOMAIN_FILTER_LIST = PersistentConfig(
WEB_SEARCH_CONCURRENT_REQUESTS = PersistentConfig(
    "WEB_SEARCH_CONCURRENT_REQUESTS",
    "rag.web.search.concurrent_requests",
    int(os.getenv("WEB_SEARCH_CONCURRENT_REQUESTS", "10")),
    int(os.getenv("WEB_SEARCH_CONCURRENT_REQUESTS", "0")),
)

@@ -2966,6 +3011,12 @@ WEB_LOADER_CONCURRENT_REQUESTS = PersistentConfig(
    int(os.getenv("WEB_LOADER_CONCURRENT_REQUESTS", "10")),
)

WEB_LOADER_TIMEOUT = PersistentConfig(
    "WEB_LOADER_TIMEOUT",
    "rag.web.loader.timeout",
    os.getenv("WEB_LOADER_TIMEOUT", ""),
)


ENABLE_WEB_LOADER_SSL_VERIFICATION = PersistentConfig(
    "ENABLE_WEB_LOADER_SSL_VERIFICATION",

@@ -2992,6 +3043,12 @@ SEARXNG_QUERY_URL = PersistentConfig(
    os.getenv("SEARXNG_QUERY_URL", ""),
)

SEARXNG_LANGUAGE = PersistentConfig(
    "SEARXNG_LANGUAGE",
    "rag.web.search.searxng_language",
    os.getenv("SEARXNG_LANGUAGE", "all"),
)

YACY_QUERY_URL = PersistentConfig(
    "YACY_QUERY_URL",
    "rag.web.search.yacy_query_url",

@@ -3422,10 +3479,16 @@ COMFYUI_WORKFLOW = PersistentConfig(
    os.getenv("COMFYUI_WORKFLOW", COMFYUI_DEFAULT_WORKFLOW),
)

comfyui_workflow_nodes = os.getenv("COMFYUI_WORKFLOW_NODES", "")
try:
    comfyui_workflow_nodes = json.loads(comfyui_workflow_nodes)
except json.JSONDecodeError:
    comfyui_workflow_nodes = []

COMFYUI_WORKFLOW_NODES = PersistentConfig(
    "COMFYUI_WORKFLOW",
    "COMFYUI_WORKFLOW_NODES",
    "image_generation.comfyui.nodes",
    [],
    comfyui_workflow_nodes,
)

IMAGES_OPENAI_API_BASE_URL = PersistentConfig(

@@ -3542,10 +3605,16 @@ IMAGES_EDIT_COMFYUI_WORKFLOW = PersistentConfig(
    os.getenv("IMAGES_EDIT_COMFYUI_WORKFLOW", ""),
)

images_edit_comfyui_workflow_nodes = os.getenv("IMAGES_EDIT_COMFYUI_WORKFLOW_NODES", "")
try:
    images_edit_comfyui_workflow_nodes = json.loads(images_edit_comfyui_workflow_nodes)
except json.JSONDecodeError:
    images_edit_comfyui_workflow_nodes = []

IMAGES_EDIT_COMFYUI_WORKFLOW_NODES = PersistentConfig(
    "IMAGES_EDIT_COMFYUI_WORKFLOW_NODES",
    "images.edit.comfyui.nodes",
    [],
    images_edit_comfyui_workflow_nodes,
)

####################################
@@ -85,32 +85,7 @@ if "cuda_error" in locals():
    log.exception(cuda_error)
    del cuda_error

log_sources = [
    "AUDIO",
    "COMFYUI",
    "CONFIG",
    "DB",
    "IMAGES",
    "MAIN",
    "MODELS",
    "OLLAMA",
    "OPENAI",
    "RAG",
    "WEBHOOK",
    "SOCKET",
    "OAUTH",
]

SRC_LOG_LEVELS = {}

for source in log_sources:
    log_env_var = source + "_LOG_LEVEL"
    SRC_LOG_LEVELS[source] = os.environ.get(log_env_var, "").upper()
    if SRC_LOG_LEVELS[source] not in logging.getLevelNamesMapping():
        SRC_LOG_LEVELS[source] = GLOBAL_LOG_LEVEL
    log.info(f"{log_env_var}: {SRC_LOG_LEVELS[source]}")

log.setLevel(SRC_LOG_LEVELS["CONFIG"])
SRC_LOG_LEVELS = {}  # Legacy variable, do not remove

WEBUI_NAME = os.environ.get("WEBUI_NAME", "Open WebUI")
if WEBUI_NAME != "Open WebUI":

@@ -364,6 +339,11 @@ if DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL is not None:
    except Exception:
        DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL = 0.0

# Enable public visibility of active user count (when disabled, only admins can see it)
ENABLE_PUBLIC_ACTIVE_USERS_COUNT = (
    os.environ.get("ENABLE_PUBLIC_ACTIVE_USERS_COUNT", "True").lower() == "true"
)

RESET_CONFIG_ON_START = (
    os.environ.get("RESET_CONFIG_ON_START", "False").lower() == "true"
)

@@ -395,6 +375,13 @@ try:
except ValueError:
    REDIS_SENTINEL_MAX_RETRY_COUNT = 2


REDIS_SOCKET_CONNECT_TIMEOUT = os.environ.get("REDIS_SOCKET_CONNECT_TIMEOUT", "")
try:
    REDIS_SOCKET_CONNECT_TIMEOUT = float(REDIS_SOCKET_CONNECT_TIMEOUT)
except ValueError:
    REDIS_SOCKET_CONNECT_TIMEOUT = None

####################################
# UVICORN WORKERS
####################################

@@ -439,7 +426,13 @@ PASSWORD_VALIDATION_REGEX_PATTERN = os.environ.get(
    "^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[^\w\s]).{8,}$",
)

PASSWORD_VALIDATION_REGEX_PATTERN = re.compile(PASSWORD_VALIDATION_REGEX_PATTERN)
try:
    PASSWORD_VALIDATION_REGEX_PATTERN = re.compile(PASSWORD_VALIDATION_REGEX_PATTERN)
except Exception as e:
    log.error(f"Invalid PASSWORD_VALIDATION_REGEX_PATTERN: {e}")
    PASSWORD_VALIDATION_REGEX_PATTERN = re.compile(
        "^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[^\w\s]).{8,}$"
    )


BYPASS_MODEL_ACCESS_CONTROL = (

@@ -546,6 +539,10 @@ if LICENSE_PUBLIC_KEY:
# MODELS
####################################

ENABLE_CUSTOM_MODEL_FALLBACK = (
    os.environ.get("ENABLE_CUSTOM_MODEL_FALLBACK", "False").lower() == "true"
)

MODELS_CACHE_TTL = os.environ.get("MODELS_CACHE_TTL", "1")
if MODELS_CACHE_TTL == "":
    MODELS_CACHE_TTL = None

@@ -620,9 +617,16 @@ ENABLE_WEBSOCKET_SUPPORT = (
WEBSOCKET_MANAGER = os.environ.get("WEBSOCKET_MANAGER", "")

WEBSOCKET_REDIS_OPTIONS = os.environ.get("WEBSOCKET_REDIS_OPTIONS", "")


if WEBSOCKET_REDIS_OPTIONS == "":
    log.debug("No WEBSOCKET_REDIS_OPTIONS provided, defaulting to None")
    WEBSOCKET_REDIS_OPTIONS = None
    if REDIS_SOCKET_CONNECT_TIMEOUT:
        WEBSOCKET_REDIS_OPTIONS = {
            "socket_connect_timeout": REDIS_SOCKET_CONNECT_TIMEOUT
        }
    else:
        log.debug("No WEBSOCKET_REDIS_OPTIONS provided, defaulting to None")
        WEBSOCKET_REDIS_OPTIONS = None
else:
    try:
        WEBSOCKET_REDIS_OPTIONS = json.loads(WEBSOCKET_REDIS_OPTIONS)
@@ -37,7 +37,7 @@ from open_webui.utils.plugin import (
from open_webui.utils.tools import get_tools
from open_webui.utils.access_control import has_access

from open_webui.env import SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL
from open_webui.env import GLOBAL_LOG_LEVEL

from open_webui.utils.misc import (
    add_or_update_system_message,

@@ -54,7 +54,6 @@ from open_webui.utils.payload import (

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


def get_function_module_by_id(request: Request, pipe_id: str):
@@ -9,7 +9,6 @@ from open_webui.env import (
    OPEN_WEBUI_DIR,
    DATABASE_URL,
    DATABASE_SCHEMA,
    SRC_LOG_LEVELS,
    DATABASE_POOL_MAX_OVERFLOW,
    DATABASE_POOL_RECYCLE,
    DATABASE_POOL_SIZE,

@@ -25,7 +24,6 @@ from sqlalchemy.sql.type_api import _T
from typing_extensions import Self

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["DB"])


class JSONField(types.TypeDecorator):

@@ -92,8 +90,6 @@ if SQLALCHEMY_DATABASE_URL.startswith("sqlite+sqlcipher://"):

    # Extract database path from SQLCipher URL
    db_path = SQLALCHEMY_DATABASE_URL.replace("sqlite+sqlcipher://", "")
    if db_path.startswith("/"):
        db_path = db_path[1:]  # Remove leading slash for relative paths

    # Create a custom creator function that uses sqlcipher3
    def create_sqlcipher_connection():
@@ -2,7 +2,6 @@ import logging
import os
from contextvars import ContextVar

from open_webui.env import SRC_LOG_LEVELS
from peewee import *
from peewee import InterfaceError as PeeWeeInterfaceError
from peewee import PostgresqlDatabase

@@ -10,7 +9,6 @@ from playhouse.db_url import connect, parse
from playhouse.shortcuts import ReconnectMixin

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["DB"])

db_state_default = {"closed": None, "conn": None, "ctx": None, "transactions": None}
db_state = ContextVar("db_state", default=db_state_default.copy())

@@ -56,8 +54,6 @@ def register_connection(db_url):
        # Parse the database path from SQLCipher URL
        # Convert sqlite+sqlcipher:///path/to/db.sqlite to /path/to/db.sqlite
        db_path = db_url.replace("sqlite+sqlcipher://", "")
        if db_path.startswith("/"):
            db_path = db_path[1:]  # Remove leading slash for relative paths

        # Use Peewee's native SqlCipherDatabase with encryption
        db = SqlCipherDatabase(db_path, passphrase=database_password)
@@ -61,11 +61,11 @@ from open_webui.utils import logger
from open_webui.utils.audit import AuditLevel, AuditLoggingMiddleware
from open_webui.utils.logger import start_logger
from open_webui.socket.main import (
    MODELS,
    app as socket_app,
    periodic_usage_pool_cleanup,
    get_event_emitter,
    get_models_in_use,
    get_active_user_ids,
)
from open_webui.routers import (
    audio,

@@ -208,6 +208,7 @@ from open_webui.config import (
    FIRECRAWL_API_KEY,
    WEB_LOADER_ENGINE,
    WEB_LOADER_CONCURRENT_REQUESTS,
    WEB_LOADER_TIMEOUT,
    WHISPER_MODEL,
    WHISPER_VAD_FILTER,
    WHISPER_LANGUAGE,

@@ -226,6 +227,7 @@ from open_webui.config import (
    RAG_RERANKING_MODEL,
    RAG_EXTERNAL_RERANKER_URL,
    RAG_EXTERNAL_RERANKER_API_KEY,
    RAG_EXTERNAL_RERANKER_TIMEOUT,
    RAG_RERANKING_MODEL_AUTO_UPDATE,
    RAG_RERANKING_MODEL_TRUST_REMOTE_CODE,
    RAG_EMBEDDING_ENGINE,

@@ -263,6 +265,7 @@ from open_webui.config import (
    MINERU_API_MODE,
    MINERU_API_URL,
    MINERU_API_KEY,
    MINERU_API_TIMEOUT,
    MINERU_PARAMS,
    DATALAB_MARKER_USE_LLM,
    EXTERNAL_DOCUMENT_LOADER_URL,

@@ -273,6 +276,7 @@ from open_webui.config import (
    DOCLING_PARAMS,
    DOCUMENT_INTELLIGENCE_ENDPOINT,
    DOCUMENT_INTELLIGENCE_KEY,
    DOCUMENT_INTELLIGENCE_MODEL,
    MISTRAL_OCR_API_BASE_URL,
    MISTRAL_OCR_API_KEY,
    RAG_TEXT_SPLITTER,

@@ -296,6 +300,7 @@ from open_webui.config import (
    SERPAPI_API_KEY,
    SERPAPI_ENGINE,
    SEARXNG_QUERY_URL,
    SEARXNG_LANGUAGE,
    YACY_QUERY_URL,
    YACY_USERNAME,
    YACY_PASSWORD,

@@ -352,6 +357,7 @@ from open_webui.config import (
    ENABLE_API_KEYS,
    ENABLE_API_KEYS_ENDPOINT_RESTRICTIONS,
    API_KEYS_ALLOWED_ENDPOINTS,
    ENABLE_FOLDERS,
    ENABLE_CHANNELS,
    ENABLE_NOTES,
    ENABLE_COMMUNITY_SHARING,

@@ -433,6 +439,7 @@ from open_webui.config import (
    reset_config,
)
from open_webui.env import (
    ENABLE_CUSTOM_MODEL_FALLBACK,
    LICENSE_KEY,
    AUDIT_EXCLUDED_PATHS,
    AUDIT_LOG_LEVEL,

@@ -445,7 +452,6 @@ from open_webui.env import (
    GLOBAL_LOG_LEVEL,
    MAX_BODY_LOG_SIZE,
    SAFE_MODE,
    SRC_LOG_LEVELS,
    VERSION,
    DEPLOYMENT_ID,
    INSTANCE_ID,

@@ -469,6 +475,7 @@ from open_webui.env import (
    EXTERNAL_PWA_MANIFEST_URL,
    AIOHTTP_CLIENT_SESSION_SSL,
    ENABLE_STAR_SESSIONS_MIDDLEWARE,
    ENABLE_PUBLIC_ACTIVE_USERS_COUNT,
)


@@ -526,7 +533,6 @@ if SAFE_MODE:

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


class SPAStaticFiles(StaticFiles):

@@ -767,6 +773,7 @@ app.state.config.WEBHOOK_URL = WEBHOOK_URL
app.state.config.BANNERS = WEBUI_BANNERS


app.state.config.ENABLE_FOLDERS = ENABLE_FOLDERS
app.state.config.ENABLE_CHANNELS = ENABLE_CHANNELS
app.state.config.ENABLE_NOTES = ENABLE_NOTES
app.state.config.ENABLE_COMMUNITY_SHARING = ENABLE_COMMUNITY_SHARING

@@ -869,11 +876,13 @@ app.state.config.DOCLING_API_KEY = DOCLING_API_KEY
app.state.config.DOCLING_PARAMS = DOCLING_PARAMS
app.state.config.DOCUMENT_INTELLIGENCE_ENDPOINT = DOCUMENT_INTELLIGENCE_ENDPOINT
app.state.config.DOCUMENT_INTELLIGENCE_KEY = DOCUMENT_INTELLIGENCE_KEY
app.state.config.DOCUMENT_INTELLIGENCE_MODEL = DOCUMENT_INTELLIGENCE_MODEL
app.state.config.MISTRAL_OCR_API_BASE_URL = MISTRAL_OCR_API_BASE_URL
app.state.config.MISTRAL_OCR_API_KEY = MISTRAL_OCR_API_KEY
app.state.config.MINERU_API_MODE = MINERU_API_MODE
app.state.config.MINERU_API_URL = MINERU_API_URL
app.state.config.MINERU_API_KEY = MINERU_API_KEY
app.state.config.MINERU_API_TIMEOUT = MINERU_API_TIMEOUT
app.state.config.MINERU_PARAMS = MINERU_PARAMS

app.state.config.TEXT_SPLITTER = RAG_TEXT_SPLITTER

@@ -891,6 +900,7 @@ app.state.config.RAG_RERANKING_ENGINE = RAG_RERANKING_ENGINE
app.state.config.RAG_RERANKING_MODEL = RAG_RERANKING_MODEL
app.state.config.RAG_EXTERNAL_RERANKER_URL = RAG_EXTERNAL_RERANKER_URL
app.state.config.RAG_EXTERNAL_RERANKER_API_KEY = RAG_EXTERNAL_RERANKER_API_KEY
app.state.config.RAG_EXTERNAL_RERANKER_TIMEOUT = RAG_EXTERNAL_RERANKER_TIMEOUT

app.state.config.RAG_TEMPLATE = RAG_TEMPLATE

@@ -918,6 +928,7 @@ app.state.config.WEB_SEARCH_CONCURRENT_REQUESTS = WEB_SEARCH_CONCURRENT_REQUESTS

app.state.config.WEB_LOADER_ENGINE = WEB_LOADER_ENGINE
app.state.config.WEB_LOADER_CONCURRENT_REQUESTS = WEB_LOADER_CONCURRENT_REQUESTS
app.state.config.WEB_LOADER_TIMEOUT = WEB_LOADER_TIMEOUT

app.state.config.WEB_SEARCH_TRUST_ENV = WEB_SEARCH_TRUST_ENV
app.state.config.BYPASS_WEB_SEARCH_EMBEDDING_AND_RETRIEVAL = (

@@ -930,6 +941,7 @@ app.state.config.ENABLE_ONEDRIVE_INTEGRATION = ENABLE_ONEDRIVE_INTEGRATION

app.state.config.OLLAMA_CLOUD_WEB_SEARCH_API_KEY = OLLAMA_CLOUD_WEB_SEARCH_API_KEY
app.state.config.SEARXNG_QUERY_URL = SEARXNG_QUERY_URL
app.state.config.SEARXNG_LANGUAGE = SEARXNG_LANGUAGE
app.state.config.YACY_QUERY_URL = YACY_QUERY_URL
app.state.config.YACY_USERNAME = YACY_USERNAME
app.state.config.YACY_PASSWORD = YACY_PASSWORD

@@ -980,9 +992,7 @@ app.state.YOUTUBE_LOADER_TRANSLATION = None

try:
    app.state.ef = get_ef(
        app.state.config.RAG_EMBEDDING_ENGINE,
        app.state.config.RAG_EMBEDDING_MODEL,
        RAG_EMBEDDING_MODEL_AUTO_UPDATE,
        app.state.config.RAG_EMBEDDING_ENGINE, app.state.config.RAG_EMBEDDING_MODEL
    )
    if (
        app.state.config.ENABLE_RAG_HYBRID_SEARCH

@@ -993,7 +1003,7 @@ try:
            app.state.config.RAG_RERANKING_MODEL,
            app.state.config.RAG_EXTERNAL_RERANKER_URL,
            app.state.config.RAG_EXTERNAL_RERANKER_API_KEY,
            RAG_RERANKING_MODEL_AUTO_UPDATE,
            app.state.config.RAG_EXTERNAL_RERANKER_TIMEOUT,
        )
    else:
        app.state.rf = None

@@ -1030,6 +1040,7 @@ app.state.EMBEDDING_FUNCTION = get_embedding_function(
        if app.state.config.RAG_EMBEDDING_ENGINE == "azure_openai"
        else None
    ),
    enable_async=app.state.config.ENABLE_ASYNC_EMBEDDING,
)

app.state.RERANKING_FUNCTION = get_reranking_function(

@@ -1215,7 +1226,7 @@ app.state.config.VOICE_MODE_PROMPT_TEMPLATE = VOICE_MODE_PROMPT_TEMPLATE
#
########################################

app.state.MODELS = {}
app.state.MODELS = MODELS

# Add the middleware to the app
if ENABLE_COMPRESSION_MIDDLEWARE:

@@ -1529,6 +1540,7 @@ async def chat_completion(

    metadata = {}
    try:
        model_info = None
        if not model_item.get("direct", False):
            if model_id not in request.app.state.MODELS:
                raise Exception("Model not found")

@@ -1546,7 +1558,6 @@ async def chat_completion(
                raise e
        else:
            model = model_item
            model_info = None

            request.state.direct = True
            request.state.model = model

@@ -1555,6 +1566,26 @@ async def chat_completion(
            model_info.params.model_dump() if model_info and model_info.params else {}
        )

        # Check base model existence for custom models
        if model_info_params.get("base_model_id"):
            base_model_id = model_info_params.get("base_model_id")
            if base_model_id not in request.app.state.MODELS:
                if ENABLE_CUSTOM_MODEL_FALLBACK:
                    default_models = (
                        request.app.state.config.DEFAULT_MODELS or ""
                    ).split(",")

                    fallback_model_id = (
                        default_models[0].strip() if default_models[0] else None
                    )

                    if fallback_model_id:
                        request.base_model_id = fallback_model_id
                    else:
                        raise Exception("Model not found")
                else:
                    raise Exception("Model not found")

        # Chat Params
        stream_delta_chunk_size = form_data.get("params", {}).get(
            "stream_delta_chunk_size"

@@ -1575,6 +1606,8 @@ async def chat_completion(
            "user_id": user.id,
            "chat_id": form_data.pop("chat_id", None),
            "message_id": form_data.pop("id", None),
            "parent_message": form_data.pop("parent_message", None),
            "parent_message_id": form_data.pop("parent_id", None),
            "session_id": form_data.pop("session_id", None),
            "filter_ids": form_data.pop("filter_ids", []),
            "tool_ids": form_data.get("tool_ids", None),

@@ -1598,15 +1631,38 @@ async def chat_completion(
            },
        }

        if metadata.get("chat_id") and (user and user.role != "admin"):
            if not metadata["chat_id"].startswith("local:"):
        if metadata.get("chat_id") and user:
            if not metadata["chat_id"].startswith(
                "local:"
            ):  # temporary chats are not stored

                # Verify chat ownership
                chat = Chats.get_chat_by_id_and_user_id(metadata["chat_id"], user.id)
                if chat is None:
                if chat is None and user.role != "admin":  # admins can access any chat
                    raise HTTPException(
                        status_code=status.HTTP_404_NOT_FOUND,
                        detail=ERROR_MESSAGES.DEFAULT(),
                    )

                # Insert chat files from parent message if any
                parent_message = metadata.get("parent_message", {})
                parent_message_files = parent_message.get("files", [])
                if parent_message_files:
                    try:
                        Chats.insert_chat_files(
                            metadata["chat_id"],
                            parent_message.get("id"),
                            [
                                file_item.get("id")
                                for file_item in parent_message_files
                                if file_item.get("type") == "file"
                            ],
                            user.id,
                        )
                    except Exception as e:
                        log.debug(f"Error inserting chat files: {e}")
                        pass

        request.state.metadata = metadata
        form_data["metadata"] = metadata

@@ -1631,6 +1687,7 @@ async def chat_completion(
                metadata["chat_id"],
                metadata["message_id"],
                {
                    "parentId": metadata.get("parent_message_id", None),
                    "model": model_id,
                },
            )

@@ -1663,6 +1720,7 @@ async def chat_completion(
                metadata["chat_id"],
                metadata["message_id"],
                {
                    "parentId": metadata.get("parent_message_id", None),
                    "error": {"content": str(e)},
                },
            )

@@ -1839,9 +1897,11 @@ async def get_app_config(request: Request):
        "enable_login_form": app.state.config.ENABLE_LOGIN_FORM,
        "enable_websocket": ENABLE_WEBSOCKET_SUPPORT,
        "enable_version_update_check": ENABLE_VERSION_UPDATE_CHECK,
        "enable_public_active_users_count": ENABLE_PUBLIC_ACTIVE_USERS_COUNT,
        **(
            {
                "enable_direct_connections": app.state.config.ENABLE_DIRECT_CONNECTIONS,
                "enable_folders": app.state.config.ENABLE_FOLDERS,
                "enable_channels": app.state.config.ENABLE_CHANNELS,
                "enable_notes": app.state.config.ENABLE_NOTES,
                "enable_web_search": app.state.config.ENABLE_WEB_SEARCH,

@@ -2014,7 +2074,19 @@ async def get_current_usage(user=Depends(get_verified_user)):
    This is an experimental endpoint and subject to change.
    """
    try:
        return {"model_ids": get_models_in_use(), "user_ids": get_active_user_ids()}
        # If public visibility is disabled, only allow admins to access this endpoint
        if not ENABLE_PUBLIC_ACTIVE_USERS_COUNT and user.role != "admin":
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Access denied. Only administrators can view usage statistics.",
            )

        return {
            "model_ids": get_models_in_use(),
            "user_count": Users.get_active_user_count(),
        }
    except HTTPException:
        raise
    except Exception as e:
        log.error(f"Error getting usage statistics: {e}")
        raise HTTPException(status_code=500, detail="Internal Server Error")

@@ -2077,7 +2149,7 @@ except Exception as e:
    )


async def register_client(self, request, client_id: str) -> bool:
async def register_client(request, client_id: str) -> bool:
    server_type, server_id = client_id.split(":", 1)

    connection = None
@@ -0,0 +1,103 @@
"""Update messages and channel member table

Revision ID: 2f1211949ecc
Revises: 37f288994c47
Create Date: 2025-11-27 03:07:56.200231

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import open_webui.internal.db


# revision identifiers, used by Alembic.
revision: str = "2f1211949ecc"
down_revision: Union[str, None] = "37f288994c47"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # New columns to be added to channel_member table
    op.add_column("channel_member", sa.Column("status", sa.Text(), nullable=True))
    op.add_column(
        "channel_member",
        sa.Column(
            "is_active",
            sa.Boolean(),
            nullable=False,
            default=True,
            server_default=sa.sql.expression.true(),
        ),
    )

    op.add_column(
        "channel_member",
        sa.Column(
            "is_channel_muted",
            sa.Boolean(),
            nullable=False,
            default=False,
            server_default=sa.sql.expression.false(),
        ),
    )
    op.add_column(
        "channel_member",
        sa.Column(
            "is_channel_pinned",
            sa.Boolean(),
            nullable=False,
            default=False,
            server_default=sa.sql.expression.false(),
        ),
    )

    op.add_column("channel_member", sa.Column("data", sa.JSON(), nullable=True))
    op.add_column("channel_member", sa.Column("meta", sa.JSON(), nullable=True))

    op.add_column(
        "channel_member", sa.Column("joined_at", sa.BigInteger(), nullable=False)
    )
    op.add_column(
        "channel_member", sa.Column("left_at", sa.BigInteger(), nullable=True)
    )

    op.add_column(
        "channel_member", sa.Column("last_read_at", sa.BigInteger(), nullable=True)
    )

    op.add_column(
        "channel_member", sa.Column("updated_at", sa.BigInteger(), nullable=True)
    )

    # New columns to be added to message table
    op.add_column(
        "message",
        sa.Column(
            "is_pinned",
            sa.Boolean(),
            nullable=False,
            default=False,
            server_default=sa.sql.expression.false(),
        ),
    )
    op.add_column("message", sa.Column("pinned_at", sa.BigInteger(), nullable=True))
    op.add_column("message", sa.Column("pinned_by", sa.Text(), nullable=True))


def downgrade() -> None:
    op.drop_column("channel_member", "updated_at")
    op.drop_column("channel_member", "last_read_at")

    op.drop_column("channel_member", "meta")
    op.drop_column("channel_member", "data")

    op.drop_column("channel_member", "is_channel_pinned")
    op.drop_column("channel_member", "is_channel_muted")

    op.drop_column("message", "pinned_by")
    op.drop_column("message", "pinned_at")
    op.drop_column("message", "is_pinned")
@@ -20,18 +20,46 @@ depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Ensure 'id' column in 'user' table is unique and primary key (ForeignKey constraint)
    inspector = sa.inspect(op.get_bind())
    columns = inspector.get_columns("user")

    pk_columns = inspector.get_pk_constraint("user")["constrained_columns"]
    id_column = next((col for col in columns if col["name"] == "id"), None)

    if id_column and not id_column.get("unique", False):
        unique_constraints = inspector.get_unique_constraints("user")
        unique_columns = {tuple(u["column_names"]) for u in unique_constraints}

        with op.batch_alter_table("user") as batch_op:
            # If primary key is wrong, drop it
            if pk_columns and pk_columns != ["id"]:
                batch_op.drop_constraint(
                    inspector.get_pk_constraint("user")["name"], type_="primary"
                )

            # Add unique constraint if missing
            if ("id",) not in unique_columns:
                batch_op.create_unique_constraint("uq_user_id", ["id"])

            # Re-create correct primary key
            batch_op.create_primary_key("pk_user_id", ["id"])

    # Create oauth_session table
    op.create_table(
        "oauth_session",
        sa.Column("id", sa.Text(), nullable=False),
        sa.Column("user_id", sa.Text(), nullable=False),
        sa.Column("id", sa.Text(), primary_key=True, nullable=False, unique=True),
        sa.Column(
            "user_id",
            sa.Text(),
            sa.ForeignKey("user.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("provider", sa.Text(), nullable=False),
        sa.Column("token", sa.Text(), nullable=False),
        sa.Column("expires_at", sa.BigInteger(), nullable=False),
        sa.Column("created_at", sa.BigInteger(), nullable=False),
        sa.Column("updated_at", sa.BigInteger(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
    )

    # Create indexes for better performance
@@ -0,0 +1,169 @@
"""Add knowledge_file table

Revision ID: 3e0e00844bb0
Revises: 90ef40d4714e
Create Date: 2025-12-02 06:54:19.401334

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
import open_webui.internal.db

import time
import json
import uuid

# revision identifiers, used by Alembic.
revision: str = "3e0e00844bb0"
down_revision: Union[str, None] = "90ef40d4714e"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "knowledge_file",
        sa.Column("id", sa.Text(), primary_key=True),
        sa.Column("user_id", sa.Text(), nullable=False),
        sa.Column(
            "knowledge_id",
            sa.Text(),
            sa.ForeignKey("knowledge.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "file_id",
            sa.Text(),
            sa.ForeignKey("file.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("created_at", sa.BigInteger(), nullable=False),
        sa.Column("updated_at", sa.BigInteger(), nullable=False),
        # indexes
        sa.Index("ix_knowledge_file_knowledge_id", "knowledge_id"),
        sa.Index("ix_knowledge_file_file_id", "file_id"),
        sa.Index("ix_knowledge_file_user_id", "user_id"),
        # unique constraints
        sa.UniqueConstraint(
            "knowledge_id", "file_id", name="uq_knowledge_file_knowledge_file"
        ),  # prevent duplicate entries
    )

    connection = op.get_bind()

    # 2. Read existing knowledge entries and their data JSON column
    knowledge_table = sa.Table(
        "knowledge",
        sa.MetaData(),
        sa.Column("id", sa.Text()),
        sa.Column("user_id", sa.Text()),
        sa.Column("data", sa.JSON()),  # JSON stored as text in SQLite + PG
    )

    results = connection.execute(
        sa.select(
            knowledge_table.c.id, knowledge_table.c.user_id, knowledge_table.c.data
        )
    ).fetchall()

    # 3. Insert file links into the knowledge_file table
    kf_table = sa.Table(
        "knowledge_file",
        sa.MetaData(),
        sa.Column("id", sa.Text()),
        sa.Column("user_id", sa.Text()),
        sa.Column("knowledge_id", sa.Text()),
        sa.Column("file_id", sa.Text()),
        sa.Column("created_at", sa.BigInteger()),
        sa.Column("updated_at", sa.BigInteger()),
    )

    file_table = sa.Table(
        "file",
        sa.MetaData(),
        sa.Column("id", sa.Text()),
    )

    now = int(time.time())
    for knowledge_id, user_id, data in results:
        if not data:
            continue

        if isinstance(data, str):
            try:
                data = json.loads(data)
            except Exception:
                continue  # skip invalid JSON

        if not isinstance(data, dict):
            continue

        file_ids = data.get("file_ids", [])

        for file_id in file_ids:
            file_exists = connection.execute(
                sa.select(file_table.c.id).where(file_table.c.id == file_id)
            ).fetchone()

            if not file_exists:
                continue  # skip non-existing files

            row = {
                "id": str(uuid.uuid4()),
                "user_id": user_id,
                "knowledge_id": knowledge_id,
                "file_id": file_id,
                "created_at": now,
                "updated_at": now,
            }
            connection.execute(kf_table.insert().values(**row))

    with op.batch_alter_table("knowledge") as batch:
        batch.drop_column("data")


def downgrade() -> None:
    # 1. Add back the old data column
    op.add_column("knowledge", sa.Column("data", sa.JSON(), nullable=True))

    connection = op.get_bind()

    # 2. Read knowledge_file entries and reconstruct data JSON
    knowledge_table = sa.Table(
        "knowledge",
        sa.MetaData(),
        sa.Column("id", sa.Text()),
        sa.Column("data", sa.JSON()),
    )

    kf_table = sa.Table(
        "knowledge_file",
        sa.MetaData(),
        sa.Column("id", sa.Text()),
        sa.Column("knowledge_id", sa.Text()),
        sa.Column("file_id", sa.Text()),
    )

    results = connection.execute(sa.select(knowledge_table.c.id)).fetchall()

    for (knowledge_id,) in results:
        file_ids = connection.execute(
            sa.select(kf_table.c.file_id).where(kf_table.c.knowledge_id == knowledge_id)
        ).fetchall()

        file_ids_list = [fid for (fid,) in file_ids]

        data_json = {"file_ids": file_ids_list}

        connection.execute(
            knowledge_table.update()
            .where(knowledge_table.c.id == knowledge_id)
            .values(data=data_json)
        )

    # 3. Drop the knowledge_file table
    op.drop_table("knowledge_file")
@@ -0,0 +1,54 @@
"""Add channel file table

Revision ID: 6283dc0e4d8d
Revises: 3e0e00844bb0
Create Date: 2025-12-10 15:11:39.424601

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import open_webui.internal.db


# revision identifiers, used by Alembic.
revision: str = "6283dc0e4d8d"
down_revision: Union[str, None] = "3e0e00844bb0"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "channel_file",
        sa.Column("id", sa.Text(), primary_key=True),
        sa.Column("user_id", sa.Text(), nullable=False),
        sa.Column(
            "channel_id",
            sa.Text(),
            sa.ForeignKey("channel.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "file_id",
            sa.Text(),
            sa.ForeignKey("file.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("created_at", sa.BigInteger(), nullable=False),
        sa.Column("updated_at", sa.BigInteger(), nullable=False),
        # indexes
        sa.Index("ix_channel_file_channel_id", "channel_id"),
        sa.Index("ix_channel_file_file_id", "file_id"),
        sa.Index("ix_channel_file_user_id", "user_id"),
        # unique constraints
        sa.UniqueConstraint(
            "channel_id", "file_id", name="uq_channel_file_channel_file"
        ),  # prevent duplicate entries
    )


def downgrade() -> None:
    op.drop_table("channel_file")
@@ -0,0 +1,49 @@
"""Update channel file and knowledge table

Revision ID: 81cc2ce44d79
Revises: 6283dc0e4d8d
Create Date: 2025-12-10 16:07:58.001282

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import open_webui.internal.db


# revision identifiers, used by Alembic.
revision: str = "81cc2ce44d79"
down_revision: Union[str, None] = "6283dc0e4d8d"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Add message_id column to channel_file table
    with op.batch_alter_table("channel_file", schema=None) as batch_op:
        batch_op.add_column(
            sa.Column(
                "message_id",
                sa.Text(),
                sa.ForeignKey(
                    "message.id", ondelete="CASCADE", name="fk_channel_file_message_id"
                ),
                nullable=True,
            )
        )

    # Add data column to knowledge table
    with op.batch_alter_table("knowledge", schema=None) as batch_op:
        batch_op.add_column(sa.Column("data", sa.JSON(), nullable=True))


def downgrade() -> None:
    # Remove message_id column from channel_file table
    with op.batch_alter_table("channel_file", schema=None) as batch_op:
        batch_op.drop_column("message_id")

    # Remove data column from knowledge table
    with op.batch_alter_table("knowledge", schema=None) as batch_op:
        batch_op.drop_column("data")
@@ -0,0 +1,81 @@
"""Update channel and channel members table

Revision ID: 90ef40d4714e
Revises: b10670c03dd5
Create Date: 2025-11-30 06:33:38.790341

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
import open_webui.internal.db


# revision identifiers, used by Alembic.
revision: str = "90ef40d4714e"
down_revision: Union[str, None] = "b10670c03dd5"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # Update 'channel' table
    op.add_column("channel", sa.Column("is_private", sa.Boolean(), nullable=True))

    op.add_column("channel", sa.Column("archived_at", sa.BigInteger(), nullable=True))
    op.add_column("channel", sa.Column("archived_by", sa.Text(), nullable=True))

    op.add_column("channel", sa.Column("deleted_at", sa.BigInteger(), nullable=True))
    op.add_column("channel", sa.Column("deleted_by", sa.Text(), nullable=True))

    op.add_column("channel", sa.Column("updated_by", sa.Text(), nullable=True))

    # Update 'channel_member' table
    op.add_column("channel_member", sa.Column("role", sa.Text(), nullable=True))
    op.add_column("channel_member", sa.Column("invited_by", sa.Text(), nullable=True))
    op.add_column(
        "channel_member", sa.Column("invited_at", sa.BigInteger(), nullable=True)
    )

    # Create 'channel_webhook' table
    op.create_table(
        "channel_webhook",
        sa.Column("id", sa.Text(), primary_key=True, unique=True, nullable=False),
        sa.Column("user_id", sa.Text(), nullable=False),
        sa.Column(
            "channel_id",
            sa.Text(),
            sa.ForeignKey("channel.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("name", sa.Text(), nullable=False),
        sa.Column("profile_image_url", sa.Text(), nullable=True),
        sa.Column("token", sa.Text(), nullable=False),
        sa.Column("last_used_at", sa.BigInteger(), nullable=True),
        sa.Column("created_at", sa.BigInteger(), nullable=False),
        sa.Column("updated_at", sa.BigInteger(), nullable=False),
    )

    pass


def downgrade() -> None:
    # Downgrade 'channel' table
    op.drop_column("channel", "is_private")
    op.drop_column("channel", "archived_at")
    op.drop_column("channel", "archived_by")
    op.drop_column("channel", "deleted_at")
    op.drop_column("channel", "deleted_by")
    op.drop_column("channel", "updated_by")

    # Downgrade 'channel_member' table
    op.drop_column("channel_member", "role")
    op.drop_column("channel_member", "invited_by")
    op.drop_column("channel_member", "invited_at")

    # Drop 'channel_webhook' table
    op.drop_table("channel_webhook")

    pass
@ -0,0 +1,251 @@
|
|||
"""Update user table
|
||||
|
||||
Revision ID: b10670c03dd5
|
||||
Revises: 2f1211949ecc
|
||||
Create Date: 2025-11-28 04:55:31.737538
|
||||
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
import open_webui.internal.db
|
||||
import json
|
||||
import time
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "b10670c03dd5"
|
||||
down_revision: Union[str, None] = "2f1211949ecc"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def _drop_sqlite_indexes_for_column(table_name, column_name, conn):
|
||||
"""
|
||||
SQLite requires manual removal of any indexes referencing a column
|
||||
before ALTER TABLE ... DROP COLUMN can succeed.
|
||||
"""
|
||||
indexes = conn.execute(sa.text(f"PRAGMA index_list('{table_name}')")).fetchall()
|
||||
|
||||
for idx in indexes:
|
||||
index_name = idx[1] # index name
|
||||
# Get indexed columns
|
||||
idx_info = conn.execute(
|
||||
sa.text(f"PRAGMA index_info('{index_name}')")
|
||||
).fetchall()
|
||||
|
||||
indexed_cols = [row[2] for row in idx_info] # col names
|
||||
if column_name in indexed_cols:
|
||||
conn.execute(sa.text(f"DROP INDEX IF EXISTS {index_name}"))
|
||||
|
||||
|
||||
def _convert_column_to_json(table: str, column: str):
|
||||
conn = op.get_bind()
|
||||
dialect = conn.dialect.name
|
||||
|
||||
# SQLite cannot ALTER COLUMN → must recreate column
|
||||
if dialect == "sqlite":
|
||||
# 1. Add temporary column
|
||||
op.add_column(table, sa.Column(f"{column}_json", sa.JSON(), nullable=True))
|
||||
|
||||
# 2. Load old data
|
||||
rows = conn.execute(sa.text(f'SELECT id, {column} FROM "{table}"')).fetchall()
|
||||
|
||||
for row in rows:
|
||||
uid, raw = row
|
||||
if raw is None:
|
||||
parsed = None
|
||||
else:
|
||||
try:
|
||||
parsed = json.loads(raw)
|
||||
except Exception:
|
||||
parsed = None # fallback safe behavior
|
||||
|
||||
conn.execute(
|
||||
sa.text(f'UPDATE "{table}" SET {column}_json = :val WHERE id = :id'),
|
||||
{"val": json.dumps(parsed) if parsed else None, "id": uid},
|
||||
)
|
||||
|
||||
# 3. Drop old TEXT column
|
||||
op.drop_column(table, column)
|
||||
|
||||
# 4. Rename new JSON column → original name
|
||||
op.alter_column(table, f"{column}_json", new_column_name=column)
|
||||
|
||||
else:
|
||||
# PostgreSQL supports direct CAST
|
||||
op.alter_column(
|
||||
table,
|
||||
column,
|
||||
type_=sa.JSON(),
|
||||
postgresql_using=f"{column}::json",
|
||||
)
|
||||
|
||||
|
||||
def _convert_column_to_text(table: str, column: str):
|
||||
conn = op.get_bind()
|
||||
dialect = conn.dialect.name
|
||||
|
||||
if dialect == "sqlite":
|
||||
op.add_column(table, sa.Column(f"{column}_text", sa.Text(), nullable=True))
|
||||
|
||||
rows = conn.execute(sa.text(f'SELECT id, {column} FROM "{table}"')).fetchall()
|
||||
|
||||
for uid, raw in rows:
|
||||
conn.execute(
|
||||
sa.text(f'UPDATE "{table}" SET {column}_text = :val WHERE id = :id'),
|
||||
{"val": json.dumps(raw) if raw else None, "id": uid},
|
||||
)
|
||||
|
||||
op.drop_column(table, column)
|
||||
op.alter_column(table, f"{column}_text", new_column_name=column)
|
||||
|
||||
else:
|
||||
op.alter_column(
|
||||
table,
|
||||
column,
|
||||
type_=sa.Text(),
|
||||
postgresql_using=f"to_json({column})::text",
|
||||
)
|
||||
|
||||
|
||||
def upgrade() -> None:
    op.add_column(
        "user", sa.Column("profile_banner_image_url", sa.Text(), nullable=True)
    )
    op.add_column("user", sa.Column("timezone", sa.String(), nullable=True))

    op.add_column("user", sa.Column("presence_state", sa.String(), nullable=True))
    op.add_column("user", sa.Column("status_emoji", sa.String(), nullable=True))
    op.add_column("user", sa.Column("status_message", sa.Text(), nullable=True))
    op.add_column(
        "user", sa.Column("status_expires_at", sa.BigInteger(), nullable=True)
    )

    op.add_column("user", sa.Column("oauth", sa.JSON(), nullable=True))

    # Convert info (TEXT/JSONField) → JSON
    _convert_column_to_json("user", "info")
    # Convert settings (TEXT/JSONField) → JSON
    _convert_column_to_json("user", "settings")

    op.create_table(
        "api_key",
        sa.Column("id", sa.Text(), primary_key=True, unique=True),
        sa.Column("user_id", sa.Text(), sa.ForeignKey("user.id", ondelete="CASCADE")),
        sa.Column("key", sa.Text(), unique=True, nullable=False),
        sa.Column("data", sa.JSON(), nullable=True),
        sa.Column("expires_at", sa.BigInteger(), nullable=True),
        sa.Column("last_used_at", sa.BigInteger(), nullable=True),
        sa.Column("created_at", sa.BigInteger(), nullable=False),
        sa.Column("updated_at", sa.BigInteger(), nullable=False),
    )

    conn = op.get_bind()
    users = conn.execute(
        sa.text('SELECT id, oauth_sub FROM "user" WHERE oauth_sub IS NOT NULL')
    ).fetchall()

    for uid, oauth_sub in users:
        if oauth_sub:
            # Example formats supported:
            # provider@sub
            # plain sub (stored as {"oidc": {"sub": sub}})
            if "@" in oauth_sub:
                provider, sub = oauth_sub.split("@", 1)
            else:
                provider, sub = "oidc", oauth_sub

            oauth_json = json.dumps({provider: {"sub": sub}})
            conn.execute(
                sa.text('UPDATE "user" SET oauth = :oauth WHERE id = :id'),
                {"oauth": oauth_json, "id": uid},
            )

    users_with_keys = conn.execute(
        sa.text('SELECT id, api_key FROM "user" WHERE api_key IS NOT NULL')
    ).fetchall()
    now = int(time.time())

    for uid, api_key in users_with_keys:
        if api_key:
            conn.execute(
                sa.text(
                    """
                    INSERT INTO api_key (id, user_id, key, created_at, updated_at)
                    VALUES (:id, :user_id, :key, :created_at, :updated_at)
                    """
                ),
                {
                    "id": f"key_{uid}",
                    "user_id": uid,
                    "key": api_key,
                    "created_at": now,
                    "updated_at": now,
                },
            )

    if conn.dialect.name == "sqlite":
        _drop_sqlite_indexes_for_column("user", "api_key", conn)
        _drop_sqlite_indexes_for_column("user", "oauth_sub", conn)

    with op.batch_alter_table("user") as batch_op:
        batch_op.drop_column("api_key")
        batch_op.drop_column("oauth_sub")


def downgrade() -> None:
    # --- 1. Restore old oauth_sub column ---
    op.add_column("user", sa.Column("oauth_sub", sa.Text(), nullable=True))

    conn = op.get_bind()
    users = conn.execute(
        sa.text('SELECT id, oauth FROM "user" WHERE oauth IS NOT NULL')
    ).fetchall()

    for uid, oauth in users:
        try:
            data = json.loads(oauth)
            provider = list(data.keys())[0]
            sub = data[provider].get("sub")
            oauth_sub = f"{provider}@{sub}"
        except Exception:
            oauth_sub = None

        conn.execute(
            sa.text('UPDATE "user" SET oauth_sub = :oauth_sub WHERE id = :id'),
            {"oauth_sub": oauth_sub, "id": uid},
        )

    op.drop_column("user", "oauth")

    # --- 2. Restore api_key field ---
    op.add_column("user", sa.Column("api_key", sa.String(), nullable=True))

    # Restore values from api_key
    keys = conn.execute(sa.text("SELECT user_id, key FROM api_key")).fetchall()
    for uid, key in keys:
        conn.execute(
            sa.text('UPDATE "user" SET api_key = :key WHERE id = :id'),
            {"key": key, "id": uid},
        )

    # Drop new table
    op.drop_table("api_key")

    with op.batch_alter_table("user") as batch_op:
        batch_op.drop_column("profile_banner_image_url")
        batch_op.drop_column("timezone")

        batch_op.drop_column("presence_state")
        batch_op.drop_column("status_emoji")
        batch_op.drop_column("status_message")
        batch_op.drop_column("status_expires_at")

    # Convert info (JSON) → TEXT
    _convert_column_to_text("user", "info")
    # Convert settings (JSON) → TEXT
    _convert_column_to_text("user", "settings")

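For reference, the `oauth_sub` → `oauth` conversion in `upgrade()` above maps the legacy single-string identity onto a per-provider JSON object. A minimal, self-contained sketch of that mapping (hypothetical sub values, mirroring the loop in the migration):

```python
import json

def legacy_sub_to_oauth(oauth_sub: str) -> str:
    # Mirrors the upgrade() loop: "provider@sub" splits on the first "@",
    # a bare sub is attributed to the generic "oidc" provider.
    if "@" in oauth_sub:
        provider, sub = oauth_sub.split("@", 1)
    else:
        provider, sub = "oidc", oauth_sub
    return json.dumps({provider: {"sub": sub}})

assert legacy_sub_to_oauth("google@108234") == '{"google": {"sub": "108234"}}'
assert legacy_sub_to_oauth("108234") == '{"oidc": {"sub": "108234"}}'
```
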
@@ -0,0 +1,57 @@
"""Add chat_file table

Revision ID: c440947495f3
Revises: 81cc2ce44d79
Create Date: 2025-12-21 20:27:41.694897

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = "c440947495f3"
down_revision: Union[str, None] = "81cc2ce44d79"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    op.create_table(
        "chat_file",
        sa.Column("id", sa.Text(), primary_key=True),
        sa.Column("user_id", sa.Text(), nullable=False),
        sa.Column(
            "chat_id",
            sa.Text(),
            sa.ForeignKey("chat.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column(
            "file_id",
            sa.Text(),
            sa.ForeignKey("file.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("message_id", sa.Text(), nullable=True),
        sa.Column("created_at", sa.BigInteger(), nullable=False),
        sa.Column("updated_at", sa.BigInteger(), nullable=False),
        # indexes
        sa.Index("ix_chat_file_chat_id", "chat_id"),
        sa.Index("ix_chat_file_file_id", "file_id"),
        sa.Index("ix_chat_file_message_id", "message_id"),
        sa.Index("ix_chat_file_user_id", "user_id"),
        # unique constraints
        sa.UniqueConstraint(
            "chat_id", "file_id", name="uq_chat_file_chat_file"
        ),  # prevent duplicate entries
    )


def downgrade() -> None:
    op.drop_table("chat_file")

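A small usage sketch of the constraint this table ships with (hypothetical ids; `engine` stands in for a connection to the migrated database and is an assumption, not part of the diff): inserting the same `(chat_id, file_id)` pair twice violates `uq_chat_file_chat_file`, which is what lets the application treat attachment as idempotent.

```python
import sqlalchemy as sa

stmt = sa.text(
    "INSERT INTO chat_file (id, user_id, chat_id, file_id, created_at, updated_at) "
    "VALUES (:id, :uid, :cid, :fid, :now, :now)"
)
params = {"id": "cf-1", "uid": "u-1", "cid": "c-1", "fid": "f-1", "now": 0}

with engine.begin() as conn:  # `engine` is assumed
    conn.execute(stmt, params)
    # Same chat/file pair, new primary key: raises IntegrityError on the
    # unique constraint rather than creating a duplicate attachment.
    conn.execute(stmt, {**params, "id": "cf-2"})
```
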
@@ -3,13 +3,11 @@ import uuid
from typing import Optional

from open_webui.internal.db import Base, get_db
from open_webui.models.users import UserModel, Users
from open_webui.env import SRC_LOG_LEVELS
from open_webui.models.users import UserModel, UserProfileImageResponse, Users
from pydantic import BaseModel
from sqlalchemy import Boolean, Column, String, Text

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# DB MODEL

@@ -46,15 +44,7 @@ class ApiKey(BaseModel):
    api_key: Optional[str] = None


class UserResponse(BaseModel):
    id: str
    email: str
    name: str
    role: str
    profile_image_url: str


class SigninResponse(Token, UserResponse):
class SigninResponse(Token, UserProfileImageResponse):
    pass


@@ -96,7 +86,7 @@ class AuthsTable:
        name: str,
        profile_image_url: str = "/user.png",
        role: str = "pending",
        oauth_sub: Optional[str] = None,
        oauth: Optional[dict] = None,
    ) -> Optional[UserModel]:
        with get_db() as db:
            log.info("insert_new_auth")

@@ -110,7 +100,7 @@ class AuthsTable:
            db.add(result)

            user = Users.insert_new_user(
                id, name, email, profile_image_url, role, oauth_sub
                id, name, email, profile_image_url, role, oauth=oauth
            )

            db.commit()

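A hedged usage sketch of the new `insert_new_auth` keyword (the earlier positional parameters sit outside this hunk, so their presence here is an assumption based on the fields shown):

```python
# Sketch only: `oauth` replaces the legacy `oauth_sub` string with a
# per-provider dict, matching the JSON shape written by the migration above.
user = Auths.insert_new_auth(
    email="ada@example.com",              # assumed parameter, not shown in the hunk
    password=hashed_password,             # assumed parameter, not shown in the hunk
    name="Ada",
    oauth={"google": {"sub": "108234"}},  # was: oauth_sub="google@108234"
)
```
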
@@ -4,10 +4,24 @@ import uuid
from typing import Optional

from open_webui.internal.db import Base, get_db
from open_webui.utils.access_control import has_access
from open_webui.models.groups import Groups

from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Boolean, Column, String, Text, JSON
from sqlalchemy.dialects.postgresql import JSONB


from sqlalchemy import (
    BigInteger,
    Boolean,
    Column,
    ForeignKey,
    String,
    Text,
    JSON,
    UniqueConstraint,
    case,
    cast,
)
from sqlalchemy import or_, func, select, and_, text
from sqlalchemy.sql import exists

@@ -26,12 +40,23 @@ class Channel(Base):
    name = Column(Text)
    description = Column(Text, nullable=True)

    # Used to indicate if the channel is private (for 'group' type channels)
    is_private = Column(Boolean, nullable=True)

    data = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)
    access_control = Column(JSON, nullable=True)

    created_at = Column(BigInteger)

    updated_at = Column(BigInteger)
    updated_by = Column(Text, nullable=True)

    archived_at = Column(BigInteger, nullable=True)
    archived_by = Column(Text, nullable=True)

    deleted_at = Column(BigInteger, nullable=True)
    deleted_by = Column(Text, nullable=True)


class ChannelModel(BaseModel):

@@ -39,17 +64,157 @@ class ChannelModel(BaseModel):

    id: str
    user_id: str

    type: Optional[str] = None

    name: str
    description: Optional[str] = None

    is_private: Optional[bool] = None

    data: Optional[dict] = None
    meta: Optional[dict] = None
    access_control: Optional[dict] = None

    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch
    created_at: int  # timestamp in epoch (time_ns)

    updated_at: int  # timestamp in epoch (time_ns)
    updated_by: Optional[str] = None

    archived_at: Optional[int] = None  # timestamp in epoch (time_ns)
    archived_by: Optional[str] = None

    deleted_at: Optional[int] = None  # timestamp in epoch (time_ns)
    deleted_by: Optional[str] = None


class ChannelMember(Base):
    __tablename__ = "channel_member"

    id = Column(Text, primary_key=True, unique=True)
    channel_id = Column(Text, nullable=False)
    user_id = Column(Text, nullable=False)

    role = Column(Text, nullable=True)
    status = Column(Text, nullable=True)

    is_active = Column(Boolean, nullable=False, default=True)

    is_channel_muted = Column(Boolean, nullable=False, default=False)
    is_channel_pinned = Column(Boolean, nullable=False, default=False)

    data = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)

    invited_at = Column(BigInteger, nullable=True)
    invited_by = Column(Text, nullable=True)

    joined_at = Column(BigInteger)
    left_at = Column(BigInteger, nullable=True)

    last_read_at = Column(BigInteger, nullable=True)

    created_at = Column(BigInteger)
    updated_at = Column(BigInteger)


class ChannelMemberModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: str
    channel_id: str
    user_id: str

    role: Optional[str] = None
    status: Optional[str] = None

    is_active: bool = True

    is_channel_muted: bool = False
    is_channel_pinned: bool = False

    data: Optional[dict] = None
    meta: Optional[dict] = None

    invited_at: Optional[int] = None  # timestamp in epoch (time_ns)
    invited_by: Optional[str] = None

    joined_at: Optional[int] = None  # timestamp in epoch (time_ns)
    left_at: Optional[int] = None  # timestamp in epoch (time_ns)

    last_read_at: Optional[int] = None  # timestamp in epoch (time_ns)

    created_at: Optional[int] = None  # timestamp in epoch (time_ns)
    updated_at: Optional[int] = None  # timestamp in epoch (time_ns)


class ChannelFile(Base):
    __tablename__ = "channel_file"

    id = Column(Text, unique=True, primary_key=True)
    user_id = Column(Text, nullable=False)

    channel_id = Column(
        Text, ForeignKey("channel.id", ondelete="CASCADE"), nullable=False
    )
    message_id = Column(
        Text, ForeignKey("message.id", ondelete="CASCADE"), nullable=True
    )
    file_id = Column(Text, ForeignKey("file.id", ondelete="CASCADE"), nullable=False)

    created_at = Column(BigInteger, nullable=False)
    updated_at = Column(BigInteger, nullable=False)

    __table_args__ = (
        UniqueConstraint("channel_id", "file_id", name="uq_channel_file_channel_file"),
    )


class ChannelFileModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: str

    channel_id: str
    file_id: str
    user_id: str

    created_at: int  # timestamp in epoch (time_ns)
    updated_at: int  # timestamp in epoch (time_ns)


class ChannelWebhook(Base):
    __tablename__ = "channel_webhook"

    id = Column(Text, primary_key=True, unique=True)
    channel_id = Column(Text, nullable=False)
    user_id = Column(Text, nullable=False)

    name = Column(Text, nullable=False)
    profile_image_url = Column(Text, nullable=True)

    token = Column(Text, nullable=False)
    last_used_at = Column(BigInteger, nullable=True)

    created_at = Column(BigInteger, nullable=False)
    updated_at = Column(BigInteger, nullable=False)


class ChannelWebhookModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)

    id: str
    channel_id: str
    user_id: str

    name: str
    profile_image_url: Optional[str] = None

    token: str
    last_used_at: Optional[int] = None  # timestamp in epoch (time_ns)

    created_at: int  # timestamp in epoch (time_ns)
    updated_at: int  # timestamp in epoch (time_ns)


####################

@@ -58,27 +223,94 @@ class ChannelModel(BaseModel):


class ChannelResponse(ChannelModel):
    is_manager: bool = False
    write_access: bool = False

    user_count: Optional[int] = None


class ChannelForm(BaseModel):
    name: str
    name: str = ""
    description: Optional[str] = None
    is_private: Optional[bool] = None
    data: Optional[dict] = None
    meta: Optional[dict] = None
    access_control: Optional[dict] = None
    group_ids: Optional[list[str]] = None
    user_ids: Optional[list[str]] = None


class CreateChannelForm(ChannelForm):
    type: Optional[str] = None


class ChannelTable:

    def _collect_unique_user_ids(
        self,
        invited_by: str,
        user_ids: Optional[list[str]] = None,
        group_ids: Optional[list[str]] = None,
    ) -> set[str]:
        """
        Collect unique user ids from:
        - invited_by
        - user_ids
        - each group in group_ids
        Returns a set for efficient SQL diffing.
        """
        users = set(user_ids or [])
        users.add(invited_by)

        for group_id in group_ids or []:
            users.update(Groups.get_group_user_ids_by_id(group_id))

        return users

    def _create_membership_models(
        self,
        channel_id: str,
        invited_by: str,
        user_ids: set[str],
    ) -> list[ChannelMember]:
        """
        Takes a set of NEW user IDs (already filtered to exclude existing members).
        Returns ORM ChannelMember objects to be added.
        """
        now = int(time.time_ns())
        memberships = []

        for uid in user_ids:
            model = ChannelMemberModel(
                **{
                    "id": str(uuid.uuid4()),
                    "channel_id": channel_id,
                    "user_id": uid,
                    "status": "joined",
                    "is_active": True,
                    "is_channel_muted": False,
                    "is_channel_pinned": False,
                    "invited_at": now,
                    "invited_by": invited_by,
                    "joined_at": now,
                    "left_at": None,
                    "last_read_at": now,
                    "created_at": now,
                    "updated_at": now,
                }
            )
            memberships.append(ChannelMember(**model.model_dump()))

        return memberships

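    # Sketch of the dedup semantics above (hypothetical ids). Because
    # _collect_unique_user_ids returns a set, overlapping group rosters and an
    # inviter who also appears in user_ids collapse to one entry each:
    #
    #   table._collect_unique_user_ids(
    #       invited_by="u1",
    #       user_ids=["u1", "u2"],
    #       group_ids=["g1"],   # suppose g1 resolves to {"u2", "u3"}
    #   )
    #   # -> {"u1", "u2", "u3"}
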
    def insert_new_channel(
        self, type: Optional[str], form_data: ChannelForm, user_id: str
        self, form_data: CreateChannelForm, user_id: str
    ) -> Optional[ChannelModel]:
        with get_db() as db:
            channel = ChannelModel(
                **{
                    **form_data.model_dump(),
                    "type": type,
                    "type": form_data.type if form_data.type else None,
                    "name": form_data.name.lower(),
                    "id": str(uuid.uuid4()),
                    "user_id": user_id,

@@ -86,9 +318,21 @@ class ChannelTable:
                    "updated_at": int(time.time_ns()),
                }
            )

            new_channel = Channel(**channel.model_dump())

            if form_data.type in ["group", "dm"]:
                users = self._collect_unique_user_ids(
                    invited_by=user_id,
                    user_ids=form_data.user_ids,
                    group_ids=form_data.group_ids,
                )
                memberships = self._create_membership_models(
                    channel_id=new_channel.id,
                    invited_by=user_id,
                    user_ids=users,
                )

                db.add_all(memberships)
            db.add(new_channel)
            db.commit()
            return channel

@@ -98,22 +342,481 @@ class ChannelTable:
            channels = db.query(Channel).all()
            return [ChannelModel.model_validate(channel) for channel in channels]

    def get_channels_by_user_id(
        self, user_id: str, permission: str = "read"
    ) -> list[ChannelModel]:
        channels = self.get_channels()
        return [
            channel
            for channel in channels
            if channel.user_id == user_id
            or has_access(user_id, permission, channel.access_control)
        ]

    def _has_permission(self, db, query, filter: dict, permission: str = "read"):
        group_ids = filter.get("group_ids", [])
        user_id = filter.get("user_id")

        dialect_name = db.bind.dialect.name

        # Public access
        conditions = []
        if group_ids or user_id:
            conditions.extend(
                [
                    Channel.access_control.is_(None),
                    cast(Channel.access_control, String) == "null",
                ]
            )

        # User-level permission
        if user_id:
            conditions.append(Channel.user_id == user_id)

        # Group-level permission
        if group_ids:
            group_conditions = []
            for gid in group_ids:
                if dialect_name == "sqlite":
                    group_conditions.append(
                        Channel.access_control[permission]["group_ids"].contains([gid])
                    )
                elif dialect_name == "postgresql":
                    group_conditions.append(
                        cast(
                            Channel.access_control[permission]["group_ids"],
                            JSONB,
                        ).contains([gid])
                    )
            conditions.append(or_(*group_conditions))

        if conditions:
            query = query.filter(or_(*conditions))

        return query

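    # Sketch of the access_control shape _has_permission filters on. The JSON
    # path is taken from the code above; the "user_ids" key is an assumption
    # from the wider codebase, and the values are hypothetical:
    #
    #   access_control = {
    #       "read":  {"group_ids": ["g1", "g2"], "user_ids": []},
    #       "write": {"group_ids": ["g1"], "user_ids": []},
    #   }
    #
    # A row whose access_control is SQL NULL, or the JSON literal null, is
    # treated as public and matches regardless of the user or group filter.
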
    def get_channels_by_user_id(self, user_id: str) -> list[ChannelModel]:
        with get_db() as db:
            user_group_ids = [
                group.id for group in Groups.get_groups_by_member_id(user_id)
            ]

            membership_channels = (
                db.query(Channel)
                .join(ChannelMember, Channel.id == ChannelMember.channel_id)
                .filter(
                    Channel.deleted_at.is_(None),
                    Channel.archived_at.is_(None),
                    Channel.type.in_(["group", "dm"]),
                    ChannelMember.user_id == user_id,
                    ChannelMember.is_active.is_(True),
                )
                .all()
            )

            query = db.query(Channel).filter(
                Channel.deleted_at.is_(None),
                Channel.archived_at.is_(None),
                or_(
                    Channel.type.is_(None),  # True NULL/None
                    Channel.type == "",  # Empty string
                    and_(Channel.type != "group", Channel.type != "dm"),
                ),
            )
            query = self._has_permission(
                db, query, {"user_id": user_id, "group_ids": user_group_ids}
            )

            standard_channels = query.all()

            all_channels = membership_channels + standard_channels
            return [ChannelModel.model_validate(c) for c in all_channels]

    def get_dm_channel_by_user_ids(self, user_ids: list[str]) -> Optional[ChannelModel]:
        with get_db() as db:
            # Ensure uniqueness in case a list with duplicates is passed
            unique_user_ids = list(set(user_ids))

            match_count = func.sum(
                case(
                    (ChannelMember.user_id.in_(unique_user_ids), 1),
                    else_=0,
                )
            )

            subquery = (
                db.query(ChannelMember.channel_id)
                .group_by(ChannelMember.channel_id)
                # 1. Channel must have exactly len(user_ids) members
                .having(func.count(ChannelMember.user_id) == len(unique_user_ids))
                # 2. All those members must be in unique_user_ids
                .having(match_count == len(unique_user_ids))
                .subquery()
            )

            channel = (
                db.query(Channel)
                .filter(
                    Channel.id.in_(subquery),
                    Channel.type == "dm",
                )
                .first()
            )

            return ChannelModel.model_validate(channel) if channel else None

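    # Roughly the SQL the membership subquery above compiles to (table and
    # column names from the models above; :n = len(unique_user_ids)):
    #
    #   SELECT channel_id
    #   FROM channel_member
    #   GROUP BY channel_id
    #   HAVING COUNT(user_id) = :n
    #      AND SUM(CASE WHEN user_id IN (:unique_user_ids) THEN 1 ELSE 0 END) = :n
    #
    # Both HAVING clauses together mean: exactly n members, all of them in the
    # requested id set, so an existing DM is found only for an exact roster match.
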
    def add_members_to_channel(
        self,
        channel_id: str,
        invited_by: str,
        user_ids: Optional[list[str]] = None,
        group_ids: Optional[list[str]] = None,
    ) -> list[ChannelMemberModel]:
        with get_db() as db:
            # 1. Collect all user_ids including groups + inviter
            requested_users = self._collect_unique_user_ids(
                invited_by, user_ids, group_ids
            )

            existing_users = {
                row.user_id
                for row in db.query(ChannelMember.user_id)
                .filter(ChannelMember.channel_id == channel_id)
                .all()
            }

            new_user_ids = requested_users - existing_users
            if not new_user_ids:
                return []  # Nothing to add

            new_memberships = self._create_membership_models(
                channel_id, invited_by, new_user_ids
            )

            db.add_all(new_memberships)
            db.commit()

            return [
                ChannelMemberModel.model_validate(membership)
                for membership in new_memberships
            ]

    def remove_members_from_channel(
        self,
        channel_id: str,
        user_ids: list[str],
    ) -> int:
        with get_db() as db:
            result = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id.in_(user_ids),
                )
                .delete(synchronize_session=False)
            )
            db.commit()
            return result  # number of rows deleted

    def is_user_channel_manager(self, channel_id: str, user_id: str) -> bool:
        with get_db() as db:
            # Check if the user is the creator of the channel
            # or has a 'manager' role in ChannelMember
            channel = db.query(Channel).filter(Channel.id == channel_id).first()
            if channel and channel.user_id == user_id:
                return True

            membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                    ChannelMember.role == "manager",
                )
                .first()
            )
            return membership is not None

    def join_channel(
        self, channel_id: str, user_id: str
    ) -> Optional[ChannelMemberModel]:
        with get_db() as db:
            # Check if the membership already exists
            existing_membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                )
                .first()
            )
            if existing_membership:
                return ChannelMemberModel.model_validate(existing_membership)

            # Create new membership
            channel_member = ChannelMemberModel(
                **{
                    "id": str(uuid.uuid4()),
                    "channel_id": channel_id,
                    "user_id": user_id,
                    "status": "joined",
                    "is_active": True,
                    "is_channel_muted": False,
                    "is_channel_pinned": False,
                    "joined_at": int(time.time_ns()),
                    "left_at": None,
                    "last_read_at": int(time.time_ns()),
                    "created_at": int(time.time_ns()),
                    "updated_at": int(time.time_ns()),
                }
            )
            new_membership = ChannelMember(**channel_member.model_dump())

            db.add(new_membership)
            db.commit()
            return channel_member

    def leave_channel(self, channel_id: str, user_id: str) -> bool:
        with get_db() as db:
            membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                )
                .first()
            )
            if not membership:
                return False

            membership.status = "left"
            membership.is_active = False
            membership.left_at = int(time.time_ns())
            membership.updated_at = int(time.time_ns())

            db.commit()
            return True

    def get_member_by_channel_and_user_id(
        self, channel_id: str, user_id: str
    ) -> Optional[ChannelMemberModel]:
        with get_db() as db:
            membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                )
                .first()
            )
            return ChannelMemberModel.model_validate(membership) if membership else None

    def get_members_by_channel_id(self, channel_id: str) -> list[ChannelMemberModel]:
        with get_db() as db:
            memberships = (
                db.query(ChannelMember)
                .filter(ChannelMember.channel_id == channel_id)
                .all()
            )
            return [
                ChannelMemberModel.model_validate(membership)
                for membership in memberships
            ]

    def pin_channel(self, channel_id: str, user_id: str, is_pinned: bool) -> bool:
        with get_db() as db:
            membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                )
                .first()
            )
            if not membership:
                return False

            membership.is_channel_pinned = is_pinned
            membership.updated_at = int(time.time_ns())

            db.commit()
            return True

    def update_member_last_read_at(self, channel_id: str, user_id: str) -> bool:
        with get_db() as db:
            membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                )
                .first()
            )
            if not membership:
                return False

            membership.last_read_at = int(time.time_ns())
            membership.updated_at = int(time.time_ns())

            db.commit()
            return True

    def update_member_active_status(
        self, channel_id: str, user_id: str, is_active: bool
    ) -> bool:
        with get_db() as db:
            membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                )
                .first()
            )
            if not membership:
                return False

            membership.is_active = is_active
            membership.updated_at = int(time.time_ns())

            db.commit()
            return True

    def is_user_channel_member(self, channel_id: str, user_id: str) -> bool:
        with get_db() as db:
            membership = (
                db.query(ChannelMember)
                .filter(
                    ChannelMember.channel_id == channel_id,
                    ChannelMember.user_id == user_id,
                )
                .first()
            )
            return membership is not None

    def get_channel_by_id(self, id: str) -> Optional[ChannelModel]:
        with get_db() as db:
            channel = db.query(Channel).filter(Channel.id == id).first()
            return ChannelModel.model_validate(channel) if channel else None

    def get_channels_by_file_id(self, file_id: str) -> list[ChannelModel]:
        with get_db() as db:
            channel_files = (
                db.query(ChannelFile).filter(ChannelFile.file_id == file_id).all()
            )
            channel_ids = [cf.channel_id for cf in channel_files]
            channels = db.query(Channel).filter(Channel.id.in_(channel_ids)).all()
            return [ChannelModel.model_validate(channel) for channel in channels]

    def get_channels_by_file_id_and_user_id(
        self, file_id: str, user_id: str
    ) -> list[ChannelModel]:
        with get_db() as db:
            # 1. Determine which channels have this file
            channel_file_rows = (
                db.query(ChannelFile).filter(ChannelFile.file_id == file_id).all()
            )
            channel_ids = [row.channel_id for row in channel_file_rows]

            if not channel_ids:
                return []

            # 2. Load all channel rows that still exist
            channels = (
                db.query(Channel)
                .filter(
                    Channel.id.in_(channel_ids),
                    Channel.deleted_at.is_(None),
                    Channel.archived_at.is_(None),
                )
                .all()
            )
            if not channels:
                return []

            # Preload user's group membership
            user_group_ids = [g.id for g in Groups.get_groups_by_member_id(user_id)]

            allowed_channels = []

            for channel in channels:
                # --- Case A: group or dm => user must be an active member ---
                if channel.type in ["group", "dm"]:
                    membership = (
                        db.query(ChannelMember)
                        .filter(
                            ChannelMember.channel_id == channel.id,
                            ChannelMember.user_id == user_id,
                            ChannelMember.is_active.is_(True),
                        )
                        .first()
                    )
                    if membership:
                        allowed_channels.append(ChannelModel.model_validate(channel))
                    continue

                # --- Case B: standard channel => rely on ACL permissions ---
                query = db.query(Channel).filter(Channel.id == channel.id)

                query = self._has_permission(
                    db,
                    query,
                    {"user_id": user_id, "group_ids": user_group_ids},
                    permission="read",
                )

                allowed = query.first()
                if allowed:
                    allowed_channels.append(ChannelModel.model_validate(allowed))

            return allowed_channels

    def get_channel_by_id_and_user_id(
        self, id: str, user_id: str
    ) -> Optional[ChannelModel]:
        with get_db() as db:
            # Fetch the channel
            channel: Channel = (
                db.query(Channel)
                .filter(
                    Channel.id == id,
                    Channel.deleted_at.is_(None),
                    Channel.archived_at.is_(None),
                )
                .first()
            )

            if not channel:
                return None

            # If the channel is a group or dm, read access requires membership (active)
            if channel.type in ["group", "dm"]:
                membership = (
                    db.query(ChannelMember)
                    .filter(
                        ChannelMember.channel_id == id,
                        ChannelMember.user_id == user_id,
                        ChannelMember.is_active.is_(True),
                    )
                    .first()
                )
                if membership:
                    return ChannelModel.model_validate(channel)
                else:
                    return None

            # For channels that are NOT group/dm, fall back to ACL-based read access
            query = db.query(Channel).filter(Channel.id == id)

            # Determine user groups
            user_group_ids = [
                group.id for group in Groups.get_groups_by_member_id(user_id)
            ]

            # Apply ACL rules
            query = self._has_permission(
                db,
                query,
                {"user_id": user_id, "group_ids": user_group_ids},
                permission="read",
            )

            channel_allowed = query.first()
            return (
                ChannelModel.model_validate(channel_allowed)
                if channel_allowed
                else None
            )

    def update_channel_by_id(
        self, id: str, form_data: ChannelForm
    ) -> Optional[ChannelModel]:

@@ -123,14 +826,77 @@ class ChannelTable:
                return None

            channel.name = form_data.name
            channel.description = form_data.description
            channel.is_private = form_data.is_private

            channel.data = form_data.data
            channel.meta = form_data.meta

            channel.access_control = form_data.access_control
            channel.updated_at = int(time.time_ns())

            db.commit()
            return ChannelModel.model_validate(channel) if channel else None

    def add_file_to_channel_by_id(
        self, channel_id: str, file_id: str, user_id: str
    ) -> Optional[ChannelFileModel]:
        with get_db() as db:
            channel_file = ChannelFileModel(
                **{
                    "id": str(uuid.uuid4()),
                    "channel_id": channel_id,
                    "file_id": file_id,
                    "user_id": user_id,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )

            try:
                result = ChannelFile(**channel_file.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return ChannelFileModel.model_validate(result)
                else:
                    return None
            except Exception:
                return None

    def set_file_message_id_in_channel_by_id(
        self, channel_id: str, file_id: str, message_id: str
    ) -> bool:
        try:
            with get_db() as db:
                channel_file = (
                    db.query(ChannelFile)
                    .filter_by(channel_id=channel_id, file_id=file_id)
                    .first()
                )
                if not channel_file:
                    return False

                channel_file.message_id = message_id
                channel_file.updated_at = int(time.time())

                db.commit()
                return True
        except Exception:
            return False

    def remove_file_from_channel_by_id(self, channel_id: str, file_id: str) -> bool:
        try:
            with get_db() as db:
                db.query(ChannelFile).filter_by(
                    channel_id=channel_id, file_id=file_id
                ).delete()
                db.commit()
                return True
        except Exception:
            return False

    def delete_channel_by_id(self, id: str):
        with get_db() as db:
            db.query(Channel).filter(Channel.id == id).delete()

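One behavioral consequence worth spelling out (a sketch with hypothetical ids, assuming the module's usual `Channels = ChannelTable()` singleton, mirroring `Chats = ChatTable()` in the chats module below): a duplicate attachment violates `uq_channel_file_channel_file`, the commit raises, and `add_file_to_channel_by_id` converts that into `None` rather than a duplicate row.

```python
first = Channels.add_file_to_channel_by_id("ch-1", "file-1", "u-1")
second = Channels.add_file_to_channel_by_id("ch-1", "file-1", "u-1")

assert first is not None  # ChannelFileModel for the new row
assert second is None     # IntegrityError swallowed by the except branch
```
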
@@ -7,10 +7,20 @@ from typing import Optional
from open_webui.internal.db import Base, get_db
from open_webui.models.tags import TagModel, Tag, Tags
from open_webui.models.folders import Folders
from open_webui.env import SRC_LOG_LEVELS
from open_webui.utils.misc import sanitize_data_for_db, sanitize_text_for_db

from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Boolean, Column, String, Text, JSON, Index
from sqlalchemy import (
    BigInteger,
    Boolean,
    Column,
    ForeignKey,
    String,
    Text,
    JSON,
    Index,
    UniqueConstraint,
)
from sqlalchemy import or_, func, select, and_, text
from sqlalchemy.sql import exists
from sqlalchemy.sql.expression import bindparam

@@ -20,7 +30,6 @@ from sqlalchemy.sql.expression import bindparam
####################

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


class Chat(Base):

@@ -75,6 +84,38 @@ class ChatModel(BaseModel):
    folder_id: Optional[str] = None


class ChatFile(Base):
    __tablename__ = "chat_file"

    id = Column(Text, unique=True, primary_key=True)
    user_id = Column(Text, nullable=False)

    chat_id = Column(Text, ForeignKey("chat.id", ondelete="CASCADE"), nullable=False)
    message_id = Column(Text, nullable=True)
    file_id = Column(Text, ForeignKey("file.id", ondelete="CASCADE"), nullable=False)

    created_at = Column(BigInteger, nullable=False)
    updated_at = Column(BigInteger, nullable=False)

    __table_args__ = (
        UniqueConstraint("chat_id", "file_id", name="uq_chat_file_chat_file"),
    )


class ChatFileModel(BaseModel):
    id: str
    user_id: str

    chat_id: str
    message_id: Optional[str] = None
    file_id: str

    created_at: int
    updated_at: int

    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################

@@ -126,20 +167,53 @@ class ChatTitleIdResponse(BaseModel):
    created_at: int


class ChatListResponse(BaseModel):
    items: list[ChatModel]
    total: int


class ChatUsageStatsResponse(BaseModel):
    id: str  # chat id

    models: dict = {}  # models used in the chat with their usage counts
    message_count: int  # number of messages in the chat

    history_models: dict = {}  # models used in the chat history with their usage counts
    history_message_count: int  # number of messages in the chat history
    history_user_message_count: int  # number of user messages in the chat history
    history_assistant_message_count: (
        int  # number of assistant messages in the chat history
    )

    average_response_time: (
        float  # average response time of assistant messages in seconds
    )
    average_user_message_content_length: (
        float  # average length of user message contents
    )
    average_assistant_message_content_length: (
        float  # average length of assistant message contents
    )

    tags: list[str] = []  # tags associated with the chat

    last_message_at: int  # timestamp of the last message
    updated_at: int
    created_at: int

    model_config = ConfigDict(extra="allow")


class ChatUsageStatsListResponse(BaseModel):
    items: list[ChatUsageStatsResponse]
    total: int
    model_config = ConfigDict(extra="allow")


class ChatTable:
    def _clean_null_bytes(self, obj):
        """
        Recursively remove actual null bytes (\x00) and unicode escape \\u0000
        from strings inside dict/list structures.
        Safe for JSON objects.
        """
        if isinstance(obj, str):
            return obj.replace("\x00", "").replace("\u0000", "")
        elif isinstance(obj, dict):
            return {k: self._clean_null_bytes(v) for k, v in obj.items()}
        elif isinstance(obj, list):
            return [self._clean_null_bytes(v) for v in obj]
        return obj
        """Recursively remove null bytes from strings in dict/list structures."""
        return sanitize_data_for_db(obj)

    def _sanitize_chat_row(self, chat_item):
        """

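The `_clean_null_bytes` rewrite above delegates to the shared helper; the underlying reason, easy to rediscover the hard way, is that PostgreSQL rejects NUL (`\x00`) code points in `text` and `jsonb` values. A minimal sketch of the behavior being preserved (hypothetical payload; `sanitize_data_for_db` is the real import shown in this hunk):

```python
from open_webui.utils.misc import sanitize_data_for_db

payload = {"content": "hello\x00world", "meta": ["a\u0000b"]}

# Expected post-condition, matching the old recursive implementation shown
# above: every string, at any nesting depth, has NUL characters stripped.
assert sanitize_data_for_db(payload) == {"content": "helloworld", "meta": ["ab"]}
```
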
@@ -310,7 +384,7 @@ class ChatTable:

        # Sanitize message content for null characters before upserting
        if isinstance(message.get("content"), str):
            message["content"] = message["content"].replace("\x00", "")
            message["content"] = sanitize_text_for_db(message["content"])

        chat = chat.chat
        history = chat.get("history", {})

@@ -675,14 +749,31 @@ class ChatTable:
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

    def get_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
    def get_chats_by_user_id(
        self, user_id: str, skip: Optional[int] = None, limit: Optional[int] = None
    ) -> ChatListResponse:
        with get_db() as db:
            all_chats = (
            query = (
                db.query(Chat)
                .filter_by(user_id=user_id)
                .order_by(Chat.updated_at.desc())
            )
            return [ChatModel.model_validate(chat) for chat in all_chats]

            total = query.count()

            if skip is not None:
                query = query.offset(skip)
            if limit is not None:
                query = query.limit(limit)

            all_chats = query.all()

            return ChatListResponse(
                **{
                    "items": [ChatModel.model_validate(chat) for chat in all_chats],
                    "total": total,
                }
            )

    def get_pinned_chats_by_user_id(self, user_id: str) -> list[ChatModel]:
        with get_db() as db:

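A hedged usage sketch of the new pagination contract (hypothetical user id; `Chats` is the `ChatTable()` singleton defined at the bottom of this file):

```python
page = Chats.get_chats_by_user_id("user-1", skip=0, limit=20)

print(page.total)       # total chats for the user, independent of skip/limit
print(len(page.items))  # at most 20 ChatModel rows, newest first

# Omitting skip/limit keeps the old "return everything" behavior, now wrapped
# in a ChatListResponse instead of a bare list.
```
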
@@ -713,7 +804,7 @@ class ChatTable:
        """
        Filters chats based on a search query using Python, allowing pagination using skip and limit.
        """
        search_text = search_text.replace("\u0000", "").lower().strip()
        search_text = sanitize_text_for_db(search_text).lower().strip()

        if not search_text:
            return self.get_chat_list_by_user_id(

@@ -1170,5 +1261,93 @@ class ChatTable:
        except Exception:
            return False

    def insert_chat_files(
        self, chat_id: str, message_id: str, file_ids: list[str], user_id: str
    ) -> Optional[list[ChatFileModel]]:
        if not file_ids:
            return None

        chat_message_file_ids = [
            item.file_id
            for item in self.get_chat_files_by_chat_id_and_message_id(
                chat_id, message_id
            )
        ]
        # Remove duplicates and existing file_ids
        file_ids = list(
            set(
                [
                    file_id
                    for file_id in file_ids
                    if file_id and file_id not in chat_message_file_ids
                ]
            )
        )
        if not file_ids:
            return None

        try:
            with get_db() as db:
                now = int(time.time())

                chat_files = [
                    ChatFileModel(
                        id=str(uuid.uuid4()),
                        user_id=user_id,
                        chat_id=chat_id,
                        message_id=message_id,
                        file_id=file_id,
                        created_at=now,
                        updated_at=now,
                    )
                    for file_id in file_ids
                ]

                results = [
                    ChatFile(**chat_file.model_dump()) for chat_file in chat_files
                ]

                db.add_all(results)
                db.commit()

                return chat_files
        except Exception:
            return None

    def get_chat_files_by_chat_id_and_message_id(
        self, chat_id: str, message_id: str
    ) -> list[ChatFileModel]:
        with get_db() as db:
            all_chat_files = (
                db.query(ChatFile)
                .filter_by(chat_id=chat_id, message_id=message_id)
                .order_by(ChatFile.created_at.asc())
                .all()
            )
            return [
                ChatFileModel.model_validate(chat_file) for chat_file in all_chat_files
            ]

    def delete_chat_file(self, chat_id: str, file_id: str) -> bool:
        try:
            with get_db() as db:
                db.query(ChatFile).filter_by(chat_id=chat_id, file_id=file_id).delete()
                db.commit()
                return True
        except Exception:
            return False

    def get_shared_chats_by_file_id(self, file_id: str) -> list[ChatModel]:
        with get_db() as db:
            # Join Chat and ChatFile tables to get shared chats associated with the file_id
            all_chats = (
                db.query(Chat)
                .join(ChatFile, Chat.id == ChatFile.chat_id)
                .filter(ChatFile.file_id == file_id, Chat.share_id.isnot(None))
                .all()
            )

            return [ChatModel.model_validate(chat) for chat in all_chats]


Chats = ChatTable()

@@ -6,12 +6,10 @@ from typing import Optional
from open_webui.internal.db import Base, get_db
from open_webui.models.users import User

from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, Text, JSON, Boolean

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


####################

@@ -62,6 +60,13 @@ class FeedbackResponse(BaseModel):
    updated_at: int


class FeedbackIdResponse(BaseModel):
    id: str
    user_id: str
    created_at: int
    updated_at: int


class RatingData(BaseModel):
    rating: Optional[str | int] = None
    model_id: Optional[str] = None

@@ -3,12 +3,10 @@ import time
from typing import Optional

from open_webui.internal.db import Base, JSONField, get_db
from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, JSON

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Files DB Schema

@@ -83,7 +81,7 @@ class FileModelResponse(BaseModel):
class FileMetadataResponse(BaseModel):
    id: str
    hash: Optional[str] = None
    meta: dict
    meta: Optional[dict] = None
    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch


@@ -104,6 +102,11 @@ class FileUpdateForm(BaseModel):
    meta: Optional[dict] = None


class FileListResponse(BaseModel):
    items: list[FileModel]
    total: int


class FilesTable:
    def insert_new_file(self, user_id: str, form_data: FileForm) -> Optional[FileModel]:
        with get_db() as db:

@@ -238,6 +241,7 @@ class FilesTable:
        try:
            file = db.query(File).filter_by(id=id).first()
            file.hash = hash
            file.updated_at = int(time.time())
            db.commit()

            return FileModel.model_validate(file)

@@ -249,6 +253,7 @@ class FilesTable:
        try:
            file = db.query(File).filter_by(id=id).first()
            file.data = {**(file.data if file.data else {}), **data}
            file.updated_at = int(time.time())
            db.commit()
            return FileModel.model_validate(file)
        except Exception as e:

@@ -260,6 +265,7 @@ class FilesTable:
        try:
            file = db.query(File).filter_by(id=id).first()
            file.meta = {**(file.meta if file.meta else {}), **meta}
            file.updated_at = int(time.time())
            db.commit()
            return FileModel.model_validate(file)
        except Exception:

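The `data`/`meta` updates above are shallow `{**old, **new}` merges, which matters once values are nested. A minimal sketch (hypothetical values):

```python
existing = {"source": {"provider": "s3", "bucket": "a"}, "status": "ready"}
update = {"source": {"provider": "gcs"}}

# Top-level keys are replaced wholesale; nested keys are not merged,
# so "bucket" is dropped here.
merged = {**existing, **update}
assert merged == {"source": {"provider": "gcs"}, "status": "ready"}
```
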
@@ -9,11 +9,9 @@ from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, Text, JSON, Boolean, func

from open_webui.internal.db import Base, get_db
from open_webui.env import SRC_LOG_LEVELS


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


####################

@@ -4,12 +4,10 @@ from typing import Optional

from open_webui.internal.db import Base, JSONField, get_db
from open_webui.models.users import Users, UserModel
from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Boolean, Column, String, Text, Index

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Functions DB Schema

@@ -5,17 +5,26 @@ from typing import Optional
import uuid

from open_webui.internal.db import Base, get_db
from open_webui.env import SRC_LOG_LEVELS

from open_webui.models.files import FileMetadataResponse


from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, JSON, func, ForeignKey
from sqlalchemy import (
    BigInteger,
    Column,
    String,
    Text,
    JSON,
    and_,
    func,
    ForeignKey,
    cast,
    or_,
)

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# UserGroup DB Schema

@@ -41,7 +50,6 @@ class Group(Base):


class GroupModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)
    id: str
    user_id: str

@@ -56,6 +64,8 @@ class GroupModel(BaseModel):
    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch

    model_config = ConfigDict(from_attributes=True)


class GroupMember(Base):
    __tablename__ = "group_member"

@@ -84,17 +94,8 @@ class GroupMemberModel(BaseModel):
####################


class GroupResponse(BaseModel):
    id: str
    user_id: str
    name: str
    description: str
    permissions: Optional[dict] = None
    data: Optional[dict] = None
    meta: Optional[dict] = None
class GroupResponse(GroupModel):
    member_count: Optional[int] = None
    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch


class GroupForm(BaseModel):

@@ -112,6 +113,11 @@ class GroupUpdateForm(GroupForm):
    pass


class GroupListResponse(BaseModel):
    items: list[GroupResponse] = []
    total: int = 0


class GroupTable:
    def insert_new_group(
        self, user_id: str, form_data: GroupForm

@@ -140,13 +146,87 @@ class GroupTable:
        except Exception:
            return None

    def get_groups(self) -> list[GroupModel]:
    def get_all_groups(self) -> list[GroupModel]:
        with get_db() as db:
            groups = db.query(Group).order_by(Group.updated_at.desc()).all()
            return [GroupModel.model_validate(group) for group in groups]

    def get_groups(self, filter: Optional[dict] = None) -> list[GroupResponse]:
        with get_db() as db:
            query = db.query(Group)

            if filter:
                if "query" in filter:
                    query = query.filter(Group.name.ilike(f"%{filter['query']}%"))
                if "member_id" in filter:
                    query = query.join(
                        GroupMember, GroupMember.group_id == Group.id
                    ).filter(GroupMember.user_id == filter["member_id"])

                if "share" in filter:
                    share_value = filter["share"]
                    json_share = Group.data["config"]["share"].as_boolean()

                    if share_value:
                        query = query.filter(
                            or_(
                                Group.data.is_(None),
                                json_share.is_(None),
                                json_share == True,
                            )
                        )
                    else:
                        query = query.filter(
                            and_(Group.data.isnot(None), json_share == False)
                        )
            groups = query.order_by(Group.updated_at.desc()).all()
            return [
                GroupModel.model_validate(group)
                for group in db.query(Group).order_by(Group.updated_at.desc()).all()
                GroupResponse.model_validate(
                    {
                        **GroupModel.model_validate(group).model_dump(),
                        "member_count": self.get_group_member_count_by_id(group.id),
                    }
                )
                for group in groups
            ]

    def search_groups(
        self, filter: Optional[dict] = None, skip: int = 0, limit: int = 30
    ) -> GroupListResponse:
        with get_db() as db:
            query = db.query(Group)

            if filter:
                if "query" in filter:
                    query = query.filter(Group.name.ilike(f"%{filter['query']}%"))
                if "member_id" in filter:
                    query = query.join(
                        GroupMember, GroupMember.group_id == Group.id
                    ).filter(GroupMember.user_id == filter["member_id"])

                if "share" in filter:
                    # 'share' is stored in the data JSON; support both SQLite and Postgres
                    share_value = filter["share"]
                    log.debug(f"Filtering groups by share: {share_value}")
                    query = query.filter(
                        Group.data.op("->>")("share") == str(share_value)
                    )

            total = query.count()
            query = query.order_by(Group.updated_at.desc())
            groups = query.offset(skip).limit(limit).all()

            return GroupListResponse(
                items=[
                    GroupResponse.model_validate(
                        {
                            **GroupModel.model_validate(group).model_dump(),
                            "member_count": self.get_group_member_count_by_id(
                                group.id
                            ),
                        }
                    )
                    for group in groups
                ],
                total=total,
            )

    def get_groups_by_member_id(self, user_id: str) -> list[GroupModel]:
        with get_db() as db:
            return [

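One caution the two share filters above deserve (a sketch; values hypothetical): they read different JSON paths, so they will not necessarily agree on which groups count as shared.

```python
# get_groups(filter={"share": True}) tests data["config"]["share"] with
# as_boolean(), treating NULL data or a NULL flag as shared:
matched = {"data": {"config": {"share": True}}}    # matched
matched2 = {"data": None}                          # matched (shared by default)
skipped = {"data": {"config": {"share": False}}}   # excluded

# search_groups(filter={"share": True}) instead compares the top-level
# data->>'share' text to str(True) == "True"; on PostgreSQL a JSON boolean
# renders as "true", so only string-valued flags would match as written.
```
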
@@ -293,7 +373,7 @@ class GroupTable:
    ) -> list[GroupModel]:

        # check for existing groups
        existing_groups = self.get_groups()
        existing_groups = self.get_all_groups()
        existing_group_names = {group.name for group in existing_groups}

        new_groups = []

@@ -5,20 +5,34 @@ from typing import Optional
import uuid

from open_webui.internal.db import Base, get_db
from open_webui.env import SRC_LOG_LEVELS

from open_webui.models.files import FileMetadataResponse
from open_webui.models.files import (
    File,
    FileModel,
    FileMetadataResponse,
    FileModelResponse,
)
from open_webui.models.groups import Groups
from open_webui.models.users import Users, UserResponse
from open_webui.models.users import User, UserModel, Users, UserResponse


from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, JSON
from sqlalchemy import (
    BigInteger,
    Column,
    ForeignKey,
    String,
    Text,
    JSON,
    UniqueConstraint,
    or_,
)

from open_webui.utils.access_control import has_access
from open_webui.utils.db.access_control import has_permission


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Knowledge DB Schema

@@ -34,9 +48,7 @@ class Knowledge(Base):
    name = Column(Text)
    description = Column(Text)

    data = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)

    access_control = Column(JSON, nullable=True)  # Controls data access levels.
    # Defines access control rules for this entry.
    # - `None`: Public access, available to all users with the "user" role.

@@ -67,7 +79,6 @@ class KnowledgeModel(BaseModel):
    name: str
    description: str

    data: Optional[dict] = None
    meta: Optional[dict] = None

    access_control: Optional[dict] = None

@@ -76,11 +87,42 @@ class KnowledgeModel(BaseModel):
    updated_at: int  # timestamp in epoch


class KnowledgeFile(Base):
    __tablename__ = "knowledge_file"

    id = Column(Text, unique=True, primary_key=True)

    knowledge_id = Column(
        Text, ForeignKey("knowledge.id", ondelete="CASCADE"), nullable=False
    )
    file_id = Column(Text, ForeignKey("file.id", ondelete="CASCADE"), nullable=False)
    user_id = Column(Text, nullable=False)

    created_at = Column(BigInteger, nullable=False)
    updated_at = Column(BigInteger, nullable=False)

    __table_args__ = (
        UniqueConstraint(
            "knowledge_id", "file_id", name="uq_knowledge_file_knowledge_file"
        ),
    )


class KnowledgeFileModel(BaseModel):
    id: str
    knowledge_id: str
    file_id: str
    user_id: str

    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch

    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################


class KnowledgeUserModel(KnowledgeModel):
    user: Optional[UserResponse] = None


@@ -90,16 +132,29 @@ class KnowledgeResponse(KnowledgeModel):


class KnowledgeUserResponse(KnowledgeUserModel):
    files: Optional[list[FileMetadataResponse | dict]] = None
    pass


class KnowledgeForm(BaseModel):
    name: str
    description: str
    data: Optional[dict] = None
    access_control: Optional[dict] = None


class FileUserResponse(FileModelResponse):
    user: Optional[UserResponse] = None


class KnowledgeListResponse(BaseModel):
    items: list[KnowledgeUserModel]
    total: int


class KnowledgeFileListResponse(BaseModel):
    items: list[FileUserResponse]
    total: int


class KnowledgeTable:
    def insert_new_knowledge(
        self, user_id: str, form_data: KnowledgeForm

@ -127,12 +182,13 @@ class KnowledgeTable:
|
|||
except Exception:
|
||||
return None
|
||||
|
||||
def get_knowledge_bases(self) -> list[KnowledgeUserModel]:
|
||||
def get_knowledge_bases(
|
||||
self, skip: int = 0, limit: int = 30
|
||||
) -> list[KnowledgeUserModel]:
|
||||
with get_db() as db:
|
||||
all_knowledge = (
|
||||
db.query(Knowledge).order_by(Knowledge.updated_at.desc()).all()
|
||||
)
|
||||
|
||||
user_ids = list(set(knowledge.user_id for knowledge in all_knowledge))
|
||||
|
||||
users = Users.get_users_by_user_ids(user_ids) if user_ids else []
|
||||
|
|
@ -151,6 +207,126 @@ class KnowledgeTable:
|
|||
)
|
||||
return knowledge_bases
|
||||
|
||||
def search_knowledge_bases(
|
||||
self, user_id: str, filter: dict, skip: int = 0, limit: int = 30
|
||||
) -> KnowledgeListResponse:
|
||||
try:
|
||||
with get_db() as db:
|
||||
query = db.query(Knowledge, User).outerjoin(
|
||||
User, User.id == Knowledge.user_id
|
||||
)
|
||||
|
||||
if filter:
|
||||
query_key = filter.get("query")
|
||||
if query_key:
|
||||
query = query.filter(
|
||||
or_(
|
||||
Knowledge.name.ilike(f"%{query_key}%"),
|
||||
Knowledge.description.ilike(f"%{query_key}%"),
|
||||
)
|
||||
)
|
||||
|
||||
view_option = filter.get("view_option")
|
||||
if view_option == "created":
|
||||
query = query.filter(Knowledge.user_id == user_id)
|
||||
elif view_option == "shared":
|
||||
query = query.filter(Knowledge.user_id != user_id)
|
||||
|
||||
query = has_permission(db, Knowledge, query, filter)
|
||||
|
||||
query = query.order_by(Knowledge.updated_at.desc())
|
||||
|
||||
total = query.count()
|
||||
if skip:
|
||||
query = query.offset(skip)
|
||||
if limit:
|
||||
query = query.limit(limit)
|
||||
|
||||
items = query.all()
|
||||
|
||||
knowledge_bases = []
|
||||
for knowledge_base, user in items:
|
||||
knowledge_bases.append(
|
||||
KnowledgeUserModel.model_validate(
|
||||
{
|
||||
**KnowledgeModel.model_validate(
|
||||
knowledge_base
|
||||
).model_dump(),
|
||||
"user": (
|
||||
UserModel.model_validate(user).model_dump()
|
||||
if user
|
||||
else None
|
||||
),
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
return KnowledgeListResponse(items=knowledge_bases, total=total)
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return KnowledgeListResponse(items=[], total=0)
|
||||
|
||||
def search_knowledge_files(
|
||||
self, filter: dict, skip: int = 0, limit: int = 30
|
||||
) -> KnowledgeFileListResponse:
|
||||
"""
|
||||
Scalable version: search files across all knowledge bases the user has
|
||||
READ access to, without loading all KBs or using large IN() lists.
|
||||
"""
|
||||
try:
|
||||
with get_db() as db:
|
||||
# Base query: join Knowledge → KnowledgeFile → File
|
||||
query = (
|
||||
db.query(File, User)
|
||||
.join(KnowledgeFile, File.id == KnowledgeFile.file_id)
|
||||
.join(Knowledge, KnowledgeFile.knowledge_id == Knowledge.id)
|
||||
.outerjoin(User, User.id == KnowledgeFile.user_id)
|
||||
)
|
||||
|
||||
# Apply access-control directly to the joined query
|
||||
# This makes the database handle filtering, even with 10k+ KBs
|
||||
query = has_permission(db, Knowledge, query, filter)
|
||||
|
||||
# Apply filename search
|
||||
if filter:
|
||||
q = filter.get("query")
|
||||
if q:
|
||||
query = query.filter(File.filename.ilike(f"%{q}%"))
|
||||
|
||||
# Order by file changes
|
||||
query = query.order_by(File.updated_at.desc())
|
||||
|
||||
# Count before pagination
|
||||
total = query.count()
|
||||
|
||||
if skip:
|
||||
query = query.offset(skip)
|
||||
if limit:
|
||||
query = query.limit(limit)
|
||||
|
||||
rows = query.all()
|
||||
|
||||
items = []
|
||||
for file, user in rows:
|
||||
items.append(
|
||||
FileUserResponse(
|
||||
**FileModel.model_validate(file).model_dump(),
|
||||
user=(
|
||||
UserResponse(
|
||||
**UserModel.model_validate(user).model_dump()
|
||||
)
|
||||
if user
|
||||
else None
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
return KnowledgeFileListResponse(items=items, total=total)
|
||||
|
||||
except Exception as e:
|
||||
print("search_knowledge_files error:", e)
|
||||
return KnowledgeFileListResponse(items=[], total=0)
|
||||
|
||||
def check_access_by_user_id(self, id, user_id, permission="write") -> bool:
|
||||
knowledge = self.get_knowledge_by_id(id)
|
||||
if not knowledge:
|
||||
|
|
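
The two search methods above return `KnowledgeListResponse` / `KnowledgeFileListResponse` envelopes whose `total` is counted before `offset`/`limit` are applied, so callers can page without a second count query. A minimal caller-side sketch (the user id and filter values are hypothetical; `Knowledges` is the module-level `KnowledgeTable()` singleton, which is referenced elsewhere in this diff):

    # Hypothetical pagination over the new search API
    page_size = 30
    page = Knowledges.search_knowledge_bases(
        user_id="user-123",  # hypothetical id
        filter={"query": "handbook", "view_option": "created"},
        skip=0,
        limit=page_size,
    )
    n_pages = (page.total + page_size - 1) // page_size  # ceiling division
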
@@ -182,6 +358,197 @@ class KnowledgeTable:
        except Exception:
            return None

    def get_knowledge_by_id_and_user_id(
        self, id: str, user_id: str
    ) -> Optional[KnowledgeModel]:
        knowledge = self.get_knowledge_by_id(id)
        if not knowledge:
            return None

        if knowledge.user_id == user_id:
            return knowledge

        user_group_ids = {group.id for group in Groups.get_groups_by_member_id(user_id)}
        if has_access(user_id, "write", knowledge.access_control, user_group_ids):
            return knowledge
        return None

    def get_knowledges_by_file_id(self, file_id: str) -> list[KnowledgeModel]:
        try:
            with get_db() as db:
                knowledges = (
                    db.query(Knowledge)
                    .join(KnowledgeFile, Knowledge.id == KnowledgeFile.knowledge_id)
                    .filter(KnowledgeFile.file_id == file_id)
                    .all()
                )
                return [
                    KnowledgeModel.model_validate(knowledge) for knowledge in knowledges
                ]
        except Exception:
            return []

    def search_files_by_id(
        self,
        knowledge_id: str,
        user_id: str,
        filter: dict,
        skip: int = 0,
        limit: int = 30,
    ) -> KnowledgeFileListResponse:
        try:
            with get_db() as db:
                query = (
                    db.query(File, User)
                    .join(KnowledgeFile, File.id == KnowledgeFile.file_id)
                    .outerjoin(User, User.id == KnowledgeFile.user_id)
                    .filter(KnowledgeFile.knowledge_id == knowledge_id)
                )

                if filter:
                    query_key = filter.get("query")
                    if query_key:
                        query = query.filter(File.filename.ilike(f"%{query_key}%"))

                    view_option = filter.get("view_option")
                    if view_option == "created":
                        query = query.filter(KnowledgeFile.user_id == user_id)
                    elif view_option == "shared":
                        query = query.filter(KnowledgeFile.user_id != user_id)

                    order_by = filter.get("order_by")
                    direction = filter.get("direction")

                    if order_by == "name":
                        if direction == "asc":
                            query = query.order_by(File.filename.asc())
                        else:
                            query = query.order_by(File.filename.desc())
                    elif order_by == "created_at":
                        if direction == "asc":
                            query = query.order_by(File.created_at.asc())
                        else:
                            query = query.order_by(File.created_at.desc())
                    elif order_by == "updated_at":
                        if direction == "asc":
                            query = query.order_by(File.updated_at.asc())
                        else:
                            query = query.order_by(File.updated_at.desc())
                    else:
                        query = query.order_by(File.updated_at.desc())

                else:
                    query = query.order_by(File.updated_at.desc())

                # Count BEFORE pagination
                total = query.count()

                if skip:
                    query = query.offset(skip)
                if limit:
                    query = query.limit(limit)

                items = query.all()

                files = []
                for file, user in items:
                    files.append(
                        FileUserResponse(
                            **FileModel.model_validate(file).model_dump(),
                            user=(
                                UserResponse(
                                    **UserModel.model_validate(user).model_dump()
                                )
                                if user
                                else None
                            ),
                        )
                    )

                return KnowledgeFileListResponse(items=files, total=total)
        except Exception as e:
            log.exception(e)
            return KnowledgeFileListResponse(items=[], total=0)

    def get_files_by_id(self, knowledge_id: str) -> list[FileModel]:
        try:
            with get_db() as db:
                files = (
                    db.query(File)
                    .join(KnowledgeFile, File.id == KnowledgeFile.file_id)
                    .filter(KnowledgeFile.knowledge_id == knowledge_id)
                    .all()
                )
                return [FileModel.model_validate(file) for file in files]
        except Exception:
            return []

    def get_file_metadatas_by_id(self, knowledge_id: str) -> list[FileMetadataResponse]:
        try:
            with get_db() as db:
                files = self.get_files_by_id(knowledge_id)
                return [FileMetadataResponse(**file.model_dump()) for file in files]
        except Exception:
            return []

    def add_file_to_knowledge_by_id(
        self, knowledge_id: str, file_id: str, user_id: str
    ) -> Optional[KnowledgeFileModel]:
        with get_db() as db:
            knowledge_file = KnowledgeFileModel(
                **{
                    "id": str(uuid.uuid4()),
                    "knowledge_id": knowledge_id,
                    "file_id": file_id,
                    "user_id": user_id,
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                }
            )

            try:
                result = KnowledgeFile(**knowledge_file.model_dump())
                db.add(result)
                db.commit()
                db.refresh(result)
                if result:
                    return KnowledgeFileModel.model_validate(result)
                else:
                    return None
            except Exception:
                return None

    def remove_file_from_knowledge_by_id(self, knowledge_id: str, file_id: str) -> bool:
        try:
            with get_db() as db:
                db.query(KnowledgeFile).filter_by(
                    knowledge_id=knowledge_id, file_id=file_id
                ).delete()
                db.commit()
                return True
        except Exception:
            return False

    def reset_knowledge_by_id(self, id: str) -> Optional[KnowledgeModel]:
        try:
            with get_db() as db:
                # Delete all knowledge_file entries for this knowledge_id
                db.query(KnowledgeFile).filter_by(knowledge_id=id).delete()
                db.commit()

                # Update the knowledge entry's updated_at timestamp
                db.query(Knowledge).filter_by(id=id).update(
                    {
                        "updated_at": int(time.time()),
                    }
                )
                db.commit()

                return self.get_knowledge_by_id(id=id)
        except Exception as e:
            log.exception(e)
            return None

    def update_knowledge_by_id(
        self, id: str, form_data: KnowledgeForm, overwrite: bool = False
    ) -> Optional[KnowledgeModel]:
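
Files are now linked to a knowledge base through the `knowledge_file` junction table rather than a `file_ids` list inside the knowledge row's JSON `data` (the retrieval code further down in this diff makes the same switch), so membership changes are row inserts and deletes. A minimal flow sketch with hypothetical ids:

    # Attach a file, list the KB's files via the join, then detach it
    link = Knowledges.add_file_to_knowledge_by_id(
        knowledge_id="kb-1", file_id="file-9", user_id="user-123"
    )
    if link:
        files = Knowledges.get_files_by_id("kb-1")  # list[FileModel]
        Knowledges.remove_file_from_knowledge_by_id("kb-1", "file-9")
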
@@ -5,10 +5,11 @@ from typing import Optional
from open_webui.internal.db import Base, get_db
from open_webui.models.tags import TagModel, Tag, Tags
from open_webui.models.users import Users, UserNameResponse
from open_webui.models.users import Users, User, UserNameResponse
from open_webui.models.channels import Channels, ChannelMember

from pydantic import BaseModel, ConfigDict
from pydantic import BaseModel, ConfigDict, field_validator
from sqlalchemy import BigInteger, Boolean, Column, String, Text, JSON
from sqlalchemy import or_, func, select, and_, text
from sqlalchemy.sql import exists

@@ -39,7 +40,7 @@ class MessageReactionModel(BaseModel):

class Message(Base):
    __tablename__ = "message"
    id = Column(Text, primary_key=True)
    id = Column(Text, primary_key=True, unique=True)

    user_id = Column(Text)
    channel_id = Column(Text, nullable=True)

@@ -47,6 +48,11 @@ class Message(Base):
    reply_to_id = Column(Text, nullable=True)
    parent_id = Column(Text, nullable=True)

    # Pins
    is_pinned = Column(Boolean, nullable=False, default=False)
    pinned_at = Column(BigInteger, nullable=True)
    pinned_by = Column(Text, nullable=True)

    content = Column(Text)
    data = Column(JSON, nullable=True)
    meta = Column(JSON, nullable=True)

@@ -65,12 +71,17 @@ class MessageModel(BaseModel):
    reply_to_id: Optional[str] = None
    parent_id: Optional[str] = None

    # Pins
    is_pinned: bool = False
    pinned_by: Optional[str] = None
    pinned_at: Optional[int] = None  # timestamp in epoch (time_ns)

    content: str
    data: Optional[dict] = None
    meta: Optional[dict] = None

    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch
    created_at: int  # timestamp in epoch (time_ns)
    updated_at: int  # timestamp in epoch (time_ns)


####################

@@ -79,6 +90,7 @@ class MessageModel(BaseModel):

class MessageForm(BaseModel):
    temp_id: Optional[str] = None
    content: str
    reply_to_id: Optional[str] = None
    parent_id: Optional[str] = None

@@ -88,7 +100,7 @@ class MessageForm(BaseModel):

class Reactions(BaseModel):
    name: str
    user_ids: list[str]
    users: list[dict]
    count: int


@@ -96,8 +108,25 @@ class MessageUserResponse(MessageModel):
    user: Optional[UserNameResponse] = None


class MessageUserSlimResponse(MessageUserResponse):
    data: bool | None = None

    @field_validator("data", mode="before")
    def convert_data_to_bool(cls, v):
        # No data or not a dict → False
        if not isinstance(v, dict):
            return False

        # True if ANY value in the dict is non-empty
        return any(bool(val) for val in v.values())


class MessageReplyToResponse(MessageUserResponse):
    reply_to_message: Optional[MessageUserResponse] = None
    reply_to_message: Optional[MessageUserSlimResponse] = None


class MessageWithReactionsResponse(MessageUserSlimResponse):
    reactions: list[Reactions]


class MessageResponse(MessageReplyToResponse):
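
`MessageUserSlimResponse` overrides `data` with a `mode="before"` validator, so slim payloads carry only a boolean "this message has non-empty data" flag instead of the full JSON blob. The coercion rules in isolation (a standalone restatement of the validator above, not new behavior):

    # Hypothetical probe of the convert_data_to_bool coercion
    def to_presence_flag(v):
        if not isinstance(v, dict):
            return False
        return any(bool(val) for val in v.values())

    assert to_presence_flag(None) is False
    assert to_presence_flag({"attachments": []}) is False  # all values empty
    assert to_presence_flag({"attachments": [1]}) is True  # any non-empty value
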
@@ -111,9 +140,11 @@ class MessageTable:
        self, form_data: MessageForm, channel_id: str, user_id: str
    ) -> Optional[MessageModel]:
        with get_db() as db:
            id = str(uuid.uuid4())
            channel_member = Channels.join_channel(channel_id, user_id)

            id = str(uuid.uuid4())
            ts = int(time.time_ns())

            message = MessageModel(
                **{
                    "id": id,

@@ -121,6 +152,9 @@ class MessageTable:
                    "channel_id": channel_id,
                    "reply_to_id": form_data.reply_to_id,
                    "parent_id": form_data.parent_id,
                    "is_pinned": False,
                    "pinned_at": None,
                    "pinned_by": None,
                    "content": form_data.content,
                    "data": form_data.data,
                    "meta": form_data.meta,

@@ -128,8 +162,8 @@ class MessageTable:
                    "updated_at": ts,
                }
            )

            result = Message(**message.model_dump())

            db.add(result)
            db.commit()
            db.refresh(result)

@@ -280,6 +314,30 @@ class MessageTable:
            )
        return messages

    def get_last_message_by_channel_id(self, channel_id: str) -> Optional[MessageModel]:
        with get_db() as db:
            message = (
                db.query(Message)
                .filter_by(channel_id=channel_id)
                .order_by(Message.created_at.desc())
                .first()
            )
            return MessageModel.model_validate(message) if message else None

    def get_pinned_messages_by_channel_id(
        self, channel_id: str, skip: int = 0, limit: int = 50
    ) -> list[MessageModel]:
        with get_db() as db:
            all_messages = (
                db.query(Message)
                .filter_by(channel_id=channel_id, is_pinned=True)
                .order_by(Message.pinned_at.desc())
                .offset(skip)
                .limit(limit)
                .all()
            )
            return [MessageModel.model_validate(message) for message in all_messages]

    def update_message_by_id(
        self, id: str, form_data: MessageForm
    ) -> Optional[MessageModel]:

@@ -299,10 +357,44 @@ class MessageTable:
            db.refresh(message)
            return MessageModel.model_validate(message) if message else None

    def update_is_pinned_by_id(
        self, id: str, is_pinned: bool, pinned_by: Optional[str] = None
    ) -> Optional[MessageModel]:
        with get_db() as db:
            message = db.get(Message, id)
            if not message:
                return None
            message.is_pinned = is_pinned
            message.pinned_at = int(time.time_ns()) if is_pinned else None
            message.pinned_by = pinned_by if is_pinned else None
            db.commit()
            db.refresh(message)
            return MessageModel.model_validate(message)

    def get_unread_message_count(
        self, channel_id: str, user_id: str, last_read_at: Optional[int] = None
    ) -> int:
        with get_db() as db:
            query = db.query(Message).filter(
                Message.channel_id == channel_id,
                Message.parent_id.is_(None),  # only count top-level messages
                Message.created_at > (last_read_at if last_read_at else 0),
            )
            if user_id:
                query = query.filter(Message.user_id != user_id)
            return query.count()

    def add_reaction_to_message(
        self, id: str, user_id: str, name: str
    ) -> Optional[MessageReactionModel]:
        with get_db() as db:
            # check for existing reaction
            existing_reaction = (
                db.query(MessageReaction)
                .filter_by(message_id=id, user_id=user_id, name=name)
                .first()
            )
            if existing_reaction:
                return MessageReactionModel.model_validate(existing_reaction)

            reaction_id = str(uuid.uuid4())
            reaction = MessageReactionModel(
                id=reaction_id,

@@ -319,17 +411,30 @@ class MessageTable:

    def get_reactions_by_message_id(self, id: str) -> list[Reactions]:
        with get_db() as db:
            all_reactions = db.query(MessageReaction).filter_by(message_id=id).all()
            # JOIN User so all user info is fetched in one query
            results = (
                db.query(MessageReaction, User)
                .join(User, MessageReaction.user_id == User.id)
                .filter(MessageReaction.message_id == id)
                .all()
            )

            reactions = {}
            for reaction in all_reactions:

            for reaction, user in results:
                if reaction.name not in reactions:
                    reactions[reaction.name] = {
                        "name": reaction.name,
                        "user_ids": [],
                        "users": [],
                        "count": 0,
                    }
                reactions[reaction.name]["user_ids"].append(reaction.user_id)

                reactions[reaction.name]["users"].append(
                    {
                        "id": user.id,
                        "name": user.name,
                    }
                )
                reactions[reaction.name]["count"] += 1

            return [Reactions(**reaction) for reaction in reactions.values()]
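
`get_unread_message_count` counts only top-level messages (`parent_id IS NULL`) newer than the reader's `last_read_at` watermark, excluding the reader's own messages. A caller-side sketch with hypothetical ids, assuming the module-level `Messages = MessageTable()` singleton that mirrors `Users = UsersTable()` further down in this diff:

    # Hypothetical unread-badge computation for a channel list
    last_read_at = 1700000000 * 1_000_000_000  # epoch in time_ns, matching the stored timestamps
    unread = Messages.get_unread_message_count(
        channel_id="ch-1", user_id="user-123", last_read_at=last_read_at
    )
    latest = Messages.get_last_message_by_channel_id("ch-1")
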
@@ -3,7 +3,6 @@ import time
from typing import Optional

from open_webui.internal.db import Base, JSONField, get_db
from open_webui.env import SRC_LOG_LEVELS

from open_webui.models.groups import Groups
from open_webui.models.users import User, UserModel, Users, UserResponse

@@ -13,6 +12,8 @@ from pydantic import BaseModel, ConfigDict

from sqlalchemy import String, cast, or_, and_, func
from sqlalchemy.dialects import postgresql, sqlite

from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy import BigInteger, Column, Text, JSON, Boolean

@@ -20,7 +21,6 @@ from open_webui.utils.access_control import has_access

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################

@@ -53,7 +53,7 @@ class ModelMeta(BaseModel):

class Model(Base):
    __tablename__ = "model"

    id = Column(Text, primary_key=True)
    id = Column(Text, primary_key=True, unique=True)
    """
    The model's id as used in the API. If set to an existing model, it will override the model.
    """

@@ -220,6 +220,48 @@ class ModelsTable:
            or has_access(user_id, permission, model.access_control, user_group_ids)
        ]

    def _has_permission(self, db, query, filter: dict, permission: str = "read"):
        group_ids = filter.get("group_ids", [])
        user_id = filter.get("user_id")

        dialect_name = db.bind.dialect.name

        # Public access
        conditions = []
        if group_ids or user_id:
            conditions.extend(
                [
                    Model.access_control.is_(None),
                    cast(Model.access_control, String) == "null",
                ]
            )

        # User-level permission
        if user_id:
            conditions.append(Model.user_id == user_id)

        # Group-level permission
        if group_ids:
            group_conditions = []
            for gid in group_ids:
                if dialect_name == "sqlite":
                    group_conditions.append(
                        Model.access_control[permission]["group_ids"].contains([gid])
                    )
                elif dialect_name == "postgresql":
                    group_conditions.append(
                        cast(
                            Model.access_control[permission]["group_ids"],
                            JSONB,
                        ).contains([gid])
                    )
            conditions.append(or_(*group_conditions))

        if conditions:
            query = query.filter(or_(*conditions))

        return query

    def search_models(
        self, user_id: str, filter: dict = {}, skip: int = 0, limit: int = 30
    ) -> ModelListResponse:

@@ -238,16 +280,20 @@ class ModelsTable:
                )
            )

            if filter.get("user_id"):
                query = query.filter(Model.user_id == filter.get("user_id"))

            view_option = filter.get("view_option")

            if view_option == "created":
                query = query.filter(Model.user_id == user_id)
            elif view_option == "shared":
                query = query.filter(Model.user_id != user_id)

            # Apply access control filtering
            query = self._has_permission(
                db,
                query,
                filter,
                permission="write",
            )

            tag = filter.get("tag")
            if tag:
                # TODO: This is a simple implementation and should be improved for performance

@@ -311,6 +357,14 @@ class ModelsTable:
        except Exception:
            return None

    def get_models_by_ids(self, ids: list[str]) -> list[ModelModel]:
        try:
            with get_db() as db:
                models = db.query(Model).filter(Model.id.in_(ids)).all()
                return [ModelModel.model_validate(model) for model in models]
        except Exception:
            return []

    def toggle_model_by_id(self, id: str) -> Optional[ModelModel]:
        with get_db() as db:
            try:
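
`_has_permission` pushes the access check into SQL: a row is visible when `access_control` is NULL or JSON-null (public), when the requesting user owns it, or when any of the user's group ids appears under `access_control[permission]["group_ids"]` (JSON containment, with a SQLite- or Postgres-specific operator). The document shape it matches looks roughly like this (a representative example, not taken from a real row):

    access_control = {
        "read":  {"group_ids": ["g-1", "g-2"], "user_ids": []},
        "write": {"group_ids": ["g-1"],        "user_ids": []},
    }
    # filter={"user_id": "user-123", "group_ids": ["g-2"]} with permission="read"
    # matches this row; with permission="write" it does not.
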
@@ -7,12 +7,15 @@ from functools import lru_cache
from open_webui.internal.db import Base, get_db
from open_webui.models.groups import Groups
from open_webui.utils.access_control import has_access
from open_webui.models.users import Users, UserResponse
from open_webui.models.users import User, UserModel, Users, UserResponse


from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Boolean, Column, String, Text, JSON
from sqlalchemy import or_, func, select, and_, text
from sqlalchemy.dialects.postgresql import JSONB


from sqlalchemy import or_, func, select, and_, text, cast, false
from sqlalchemy.sql import exists

####################

@@ -23,7 +26,7 @@ from sqlalchemy.sql import exists

class Note(Base):
    __tablename__ = "note"

    id = Column(Text, primary_key=True)
    id = Column(Text, primary_key=True, unique=True)
    user_id = Column(Text)

    title = Column(Text)

@@ -75,7 +78,138 @@ class NoteUserResponse(NoteModel):
    user: Optional[UserResponse] = None


class NoteItemResponse(BaseModel):
    id: str
    title: str
    data: Optional[dict]
    updated_at: int
    created_at: int
    user: Optional[UserResponse] = None


class NoteListResponse(BaseModel):
    items: list[NoteUserResponse]
    total: int


class NoteTable:
    def _has_permission(self, db, query, filter: dict, permission: str = "read"):
        group_ids = filter.get("group_ids", [])
        user_id = filter.get("user_id")
        dialect_name = db.bind.dialect.name

        conditions = []

        # Handle read_only permission separately
        if permission == "read_only":
            # For read_only, we want items where:
            # 1. User has explicit read permission (via groups or user-level)
            # 2. BUT does NOT have write permission
            # 3. Public items are NOT considered read_only

            read_conditions = []

            # Group-level read permission
            if group_ids:
                group_read_conditions = []
                for gid in group_ids:
                    if dialect_name == "sqlite":
                        group_read_conditions.append(
                            Note.access_control["read"]["group_ids"].contains([gid])
                        )
                    elif dialect_name == "postgresql":
                        group_read_conditions.append(
                            cast(
                                Note.access_control["read"]["group_ids"],
                                JSONB,
                            ).contains([gid])
                        )

                if group_read_conditions:
                    read_conditions.append(or_(*group_read_conditions))

            # Combine read conditions
            if read_conditions:
                has_read = or_(*read_conditions)
            else:
                # If no read conditions, return empty result
                return query.filter(false())

            # Now exclude items where user has write permission
            write_exclusions = []

            # Exclude items owned by user (they have implicit write)
            if user_id:
                write_exclusions.append(Note.user_id != user_id)

            # Exclude items where user has explicit write permission via groups
            if group_ids:
                group_write_conditions = []
                for gid in group_ids:
                    if dialect_name == "sqlite":
                        group_write_conditions.append(
                            Note.access_control["write"]["group_ids"].contains([gid])
                        )
                    elif dialect_name == "postgresql":
                        group_write_conditions.append(
                            cast(
                                Note.access_control["write"]["group_ids"],
                                JSONB,
                            ).contains([gid])
                        )

                if group_write_conditions:
                    # User should NOT have write permission
                    write_exclusions.append(~or_(*group_write_conditions))

            # Exclude public items (items without access_control)
            write_exclusions.append(Note.access_control.isnot(None))
            write_exclusions.append(cast(Note.access_control, String) != "null")

            # Combine: has read AND does not have write AND not public
            if write_exclusions:
                query = query.filter(and_(has_read, *write_exclusions))
            else:
                query = query.filter(has_read)

            return query

        # Original logic for other permissions (read, write, etc.)
        # Public access conditions
        if group_ids or user_id:
            conditions.extend(
                [
                    Note.access_control.is_(None),
                    cast(Note.access_control, String) == "null",
                ]
            )

        # User-level permission (owner has all permissions)
        if user_id:
            conditions.append(Note.user_id == user_id)

        # Group-level permission
        if group_ids:
            group_conditions = []
            for gid in group_ids:
                if dialect_name == "sqlite":
                    group_conditions.append(
                        Note.access_control[permission]["group_ids"].contains([gid])
                    )
                elif dialect_name == "postgresql":
                    group_conditions.append(
                        cast(
                            Note.access_control[permission]["group_ids"],
                            JSONB,
                        ).contains([gid])
                    )
            conditions.append(or_(*group_conditions))

        if conditions:
            query = query.filter(or_(*conditions))

        return query

    def insert_new_note(
        self,
        form_data: NoteForm,
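
The `read_only` branch composes its filter as a set difference rather than a single containment test. In predicate form (a restatement of the branch above, not new behavior):

    # read_only(note) :=
    #       has_group_read(note)                 # some group grants "read"
    #   AND note.user_id != user_id              # requester is not the owner
    #   AND NOT has_group_write(note)            # no group grants "write"
    #   AND note.access_control IS NOT NULL      # public notes are excluded
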
@@ -110,15 +244,107 @@ class NoteTable:
            notes = query.all()
            return [NoteModel.model_validate(note) for note in notes]

    def search_notes(
        self, user_id: str, filter: dict = {}, skip: int = 0, limit: int = 30
    ) -> NoteListResponse:
        with get_db() as db:
            query = db.query(Note, User).outerjoin(User, User.id == Note.user_id)
            if filter:
                query_key = filter.get("query")
                if query_key:
                    query = query.filter(
                        or_(
                            Note.title.ilike(f"%{query_key}%"),
                            cast(Note.data["content"]["md"], Text).ilike(
                                f"%{query_key}%"
                            ),
                        )
                    )

                view_option = filter.get("view_option")
                if view_option == "created":
                    query = query.filter(Note.user_id == user_id)
                elif view_option == "shared":
                    query = query.filter(Note.user_id != user_id)

                # Apply access control filtering
                if "permission" in filter:
                    permission = filter["permission"]
                else:
                    permission = "write"

                query = self._has_permission(
                    db,
                    query,
                    filter,
                    permission=permission,
                )

                order_by = filter.get("order_by")
                direction = filter.get("direction")

                if order_by == "name":
                    if direction == "asc":
                        query = query.order_by(Note.title.asc())
                    else:
                        query = query.order_by(Note.title.desc())
                elif order_by == "created_at":
                    if direction == "asc":
                        query = query.order_by(Note.created_at.asc())
                    else:
                        query = query.order_by(Note.created_at.desc())
                elif order_by == "updated_at":
                    if direction == "asc":
                        query = query.order_by(Note.updated_at.asc())
                    else:
                        query = query.order_by(Note.updated_at.desc())
                else:
                    query = query.order_by(Note.updated_at.desc())

            else:
                query = query.order_by(Note.updated_at.desc())

            # Count BEFORE pagination
            total = query.count()

            if skip:
                query = query.offset(skip)
            if limit:
                query = query.limit(limit)

            items = query.all()

            notes = []
            for note, user in items:
                notes.append(
                    NoteUserResponse(
                        **NoteModel.model_validate(note).model_dump(),
                        user=(
                            UserResponse(**UserModel.model_validate(user).model_dump())
                            if user
                            else None
                        ),
                    )
                )

            return NoteListResponse(items=notes, total=total)

    def get_notes_by_user_id(
        self,
        user_id: str,
        permission: str = "read",
        skip: Optional[int] = None,
        limit: Optional[int] = None,
    ) -> list[NoteModel]:
        with get_db() as db:
            query = db.query(Note).filter(Note.user_id == user_id)
            query = query.order_by(Note.updated_at.desc())
            user_group_ids = [
                group.id for group in Groups.get_groups_by_member_id(user_id)
            ]

            query = db.query(Note).order_by(Note.updated_at.desc())
            query = self._has_permission(
                db, query, {"user_id": user_id, "group_ids": user_group_ids}, permission
            )

            if skip is not None:
                query = query.offset(skip)

@@ -128,56 +354,6 @@ class NoteTable:
            notes = query.all()
            return [NoteModel.model_validate(note) for note in notes]

    def get_notes_by_permission(
        self,
        user_id: str,
        permission: str = "write",
        skip: Optional[int] = None,
        limit: Optional[int] = None,
    ) -> list[NoteModel]:
        with get_db() as db:
            user_groups = Groups.get_groups_by_member_id(user_id)
            user_group_ids = {group.id for group in user_groups}

            # Order newest-first. We stream to keep memory usage low.
            query = (
                db.query(Note)
                .order_by(Note.updated_at.desc())
                .execution_options(stream_results=True)
                .yield_per(256)
            )

            results: list[NoteModel] = []
            n_skipped = 0

            for note in query:
                # Fast-pass #1: owner
                if note.user_id == user_id:
                    permitted = True
                # Fast-pass #2: public/open
                elif note.access_control is None:
                    # Technically this should mean public access for both read and write,
                    # but we'll only do read for now; we might want to change this later
                    permitted = permission == "read"
                else:
                    permitted = has_access(
                        user_id, permission, note.access_control, user_group_ids
                    )

                if not permitted:
                    continue

                # Apply skip AFTER permission filtering so it counts only accessible notes
                if skip and n_skipped < skip:
                    n_skipped += 1
                    continue

                results.append(NoteModel.model_validate(note))
                if limit is not None and len(results) >= limit:
                    break

            return results

    def get_note_by_id(self, id: str) -> Optional[NoteModel]:
        with get_db() as db:
            note = db.query(Note).filter(Note.id == id).first()
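
`search_notes` accepts the same filter vocabulary as the other search methods: `query` (matched against the title or `data["content"]["md"]`), `view_option`, `order_by`/`direction`, plus `permission`, `user_id`, and `group_ids` consumed by `_has_permission`. A caller-side sketch with hypothetical values, assuming a module-level `Notes = NoteTable()` singleton in the same style as `Users = UsersTable()` below:

    result = Notes.search_notes(
        user_id="user-123",
        filter={
            "query": "roadmap",
            "view_option": "shared",
            "order_by": "updated_at",
            "direction": "desc",
            "user_id": "user-123",
            "group_ids": ["g-1"],
            "permission": "read",
        },
        skip=0,
        limit=30,
    )
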
@@ -9,13 +9,12 @@ import json
from cryptography.fernet import Fernet

from open_webui.internal.db import Base, get_db
from open_webui.env import SRC_LOG_LEVELS, OAUTH_SESSION_TOKEN_ENCRYPTION_KEY
from open_webui.env import OAUTH_SESSION_TOKEN_ENCRYPTION_KEY

from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, Index

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# DB MODEL

@@ -25,7 +24,7 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])

class OAuthSession(Base):
    __tablename__ = "oauth_session"

    id = Column(Text, primary_key=True)
    id = Column(Text, primary_key=True, unique=True)
    user_id = Column(Text, nullable=False)
    provider = Column(Text, nullable=False)
    token = Column(
@@ -6,12 +6,10 @@ from typing import Optional
from open_webui.internal.db import Base, get_db


from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, JSON, PrimaryKeyConstraint, Index

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


####################
@@ -6,7 +6,6 @@ from open_webui.internal.db import Base, JSONField, get_db
from open_webui.models.users import Users, UserResponse
from open_webui.models.groups import Groups

from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, JSON

@@ -14,7 +13,6 @@ from open_webui.utils.access_control import has_access

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

####################
# Tools DB Schema

@@ -24,7 +22,7 @@ log.setLevel(SRC_LOG_LEVELS["MODELS"])

class Tool(Base):
    __tablename__ = "tool"

    id = Column(String, primary_key=True)
    id = Column(String, primary_key=True, unique=True)
    user_id = Column(String)
    name = Column(Text)
    content = Column(Text)
@@ -5,14 +5,29 @@ from open_webui.internal.db import Base, JSONField, get_db


from open_webui.env import DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL

from open_webui.models.chats import Chats
from open_webui.models.groups import Groups, GroupMember
from open_webui.models.channels import ChannelMember

from open_webui.utils.misc import throttle


from pydantic import BaseModel, ConfigDict
from sqlalchemy import BigInteger, Column, String, Text, Date, exists, select
from sqlalchemy import (
    BigInteger,
    JSON,
    Column,
    String,
    Boolean,
    Text,
    Date,
    exists,
    select,
    cast,
)
from sqlalchemy import or_, case
from sqlalchemy.dialects.postgresql import JSONB

import datetime

@@ -21,59 +36,71 @@ import datetime

####################


class User(Base):
    __tablename__ = "user"

    id = Column(String, primary_key=True)
    name = Column(String)

    email = Column(String)
    username = Column(String(50), nullable=True)

    role = Column(String)
    profile_image_url = Column(Text)

    bio = Column(Text, nullable=True)
    gender = Column(Text, nullable=True)
    date_of_birth = Column(Date, nullable=True)

    info = Column(JSONField, nullable=True)
    settings = Column(JSONField, nullable=True)

    api_key = Column(String, nullable=True, unique=True)
    oauth_sub = Column(Text, unique=True)

    last_active_at = Column(BigInteger)

    updated_at = Column(BigInteger)
    created_at = Column(BigInteger)


class UserSettings(BaseModel):
    ui: Optional[dict] = {}
    model_config = ConfigDict(extra="allow")
    pass


class User(Base):
    __tablename__ = "user"

    id = Column(String, primary_key=True, unique=True)
    email = Column(String)
    username = Column(String(50), nullable=True)
    role = Column(String)

    name = Column(String)

    profile_image_url = Column(Text)
    profile_banner_image_url = Column(Text, nullable=True)

    bio = Column(Text, nullable=True)
    gender = Column(Text, nullable=True)
    date_of_birth = Column(Date, nullable=True)
    timezone = Column(String, nullable=True)

    presence_state = Column(String, nullable=True)
    status_emoji = Column(String, nullable=True)
    status_message = Column(Text, nullable=True)
    status_expires_at = Column(BigInteger, nullable=True)

    info = Column(JSON, nullable=True)
    settings = Column(JSON, nullable=True)

    oauth = Column(JSON, nullable=True)

    last_active_at = Column(BigInteger)
    updated_at = Column(BigInteger)
    created_at = Column(BigInteger)


class UserModel(BaseModel):
    id: str
    name: str

    email: str
    username: Optional[str] = None

    role: str = "pending"

    name: str

    profile_image_url: str
    profile_banner_image_url: Optional[str] = None

    bio: Optional[str] = None
    gender: Optional[str] = None
    date_of_birth: Optional[datetime.date] = None
    timezone: Optional[str] = None

    presence_state: Optional[str] = None
    status_emoji: Optional[str] = None
    status_message: Optional[str] = None
    status_expires_at: Optional[int] = None

    info: Optional[dict] = None
    settings: Optional[UserSettings] = None

    api_key: Optional[str] = None
    oauth_sub: Optional[str] = None
    oauth: Optional[dict] = None

    last_active_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch

@@ -82,6 +109,38 @@ class UserModel(BaseModel):
    model_config = ConfigDict(from_attributes=True)


class UserStatusModel(UserModel):
    is_active: bool = False

    model_config = ConfigDict(from_attributes=True)


class ApiKey(Base):
    __tablename__ = "api_key"

    id = Column(Text, primary_key=True, unique=True)
    user_id = Column(Text, nullable=False)
    key = Column(Text, unique=True, nullable=False)
    data = Column(JSON, nullable=True)
    expires_at = Column(BigInteger, nullable=True)
    last_used_at = Column(BigInteger, nullable=True)
    created_at = Column(BigInteger, nullable=False)
    updated_at = Column(BigInteger, nullable=False)


class ApiKeyModel(BaseModel):
    id: str
    user_id: str
    key: str
    data: Optional[dict] = None
    expires_at: Optional[int] = None
    last_used_at: Optional[int] = None
    created_at: int  # timestamp in epoch
    updated_at: int  # timestamp in epoch

    model_config = ConfigDict(from_attributes=True)


####################
# Forms
####################

@@ -113,7 +172,13 @@ class UserGroupIdsListResponse(BaseModel):
    total: int


class UserInfoResponse(BaseModel):
class UserStatus(BaseModel):
    status_emoji: Optional[str] = None
    status_message: Optional[str] = None
    status_expires_at: Optional[int] = None


class UserInfoResponse(UserStatus):
    id: str
    name: str
    email: str

@@ -125,6 +190,12 @@ class UserIdNameResponse(BaseModel):
    name: str


class UserIdNameStatusResponse(UserStatus):
    id: str
    name: str
    is_active: Optional[bool] = None


class UserInfoListResponse(BaseModel):
    users: list[UserInfoResponse]
    total: int

@@ -135,18 +206,18 @@ class UserIdNameListResponse(BaseModel):
    total: int


class UserResponse(BaseModel):
    id: str
    name: str
    email: str
    role: str
    profile_image_url: str


class UserNameResponse(BaseModel):
    id: str
    name: str
    role: str


class UserResponse(UserNameResponse):
    email: str


class UserProfileImageResponse(UserNameResponse):
    email: str
    profile_image_url: str


@@ -171,20 +242,20 @@ class UsersTable:
        email: str,
        profile_image_url: str = "/user.png",
        role: str = "pending",
        oauth_sub: Optional[str] = None,
        oauth: Optional[dict] = None,
    ) -> Optional[UserModel]:
        with get_db() as db:
            user = UserModel(
                **{
                    "id": id,
                    "name": name,
                    "email": email,
                    "name": name,
                    "role": role,
                    "profile_image_url": profile_image_url,
                    "last_active_at": int(time.time()),
                    "created_at": int(time.time()),
                    "updated_at": int(time.time()),
                    "oauth_sub": oauth_sub,
                    "oauth": oauth,
                }
            )
            result = User(**user.model_dump())

@@ -207,8 +278,13 @@ class UsersTable:
    def get_user_by_api_key(self, api_key: str) -> Optional[UserModel]:
        try:
            with get_db() as db:
                user = db.query(User).filter_by(api_key=api_key).first()
                return UserModel.model_validate(user)
                user = (
                    db.query(User)
                    .join(ApiKey, User.id == ApiKey.user_id)
                    .filter(ApiKey.key == api_key)
                    .first()
                )
                return UserModel.model_validate(user) if user else None
        except Exception:
            return None

@@ -220,12 +296,23 @@ class UsersTable:
        except Exception:
            return None

    def get_user_by_oauth_sub(self, sub: str) -> Optional[UserModel]:
    def get_user_by_oauth_sub(self, provider: str, sub: str) -> Optional[UserModel]:
        try:
            with get_db() as db:
                user = db.query(User).filter_by(oauth_sub=sub).first()
                return UserModel.model_validate(user)
        except Exception:
            with get_db() as db:  # type: Session
                dialect_name = db.bind.dialect.name

                query = db.query(User)
                if dialect_name == "sqlite":
                    query = query.filter(User.oauth.contains({provider: {"sub": sub}}))
                elif dialect_name == "postgresql":
                    query = query.filter(
                        User.oauth[provider].cast(JSONB)["sub"].astext == sub
                    )

                user = query.first()
                return UserModel.model_validate(user) if user else None
        except Exception as e:
            # You may want to log the exception here
            return None

    def get_users(
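
`get_user_by_oauth_sub` now takes a provider and matches inside the `oauth` JSON column, via `contains()` on SQLite and a JSONB `astext` comparison on Postgres, so one account can carry several provider subs at once. A round-trip sketch with hypothetical ids (the write path is `update_user_oauth_by_id`, further down in this diff):

    Users.update_user_oauth_by_id("user-123", "google", "sub-123")
    Users.update_user_oauth_by_id("user-123", "github", "sub-abc")

    user = Users.get_user_by_oauth_sub("google", "sub-123")
    # user.oauth == {"google": {"sub": "sub-123"}, "github": {"sub": "sub-abc"}}
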
@@ -248,6 +335,17 @@ class UsersTable:
                )
            )

            channel_id = filter.get("channel_id")
            if channel_id:
                query = query.filter(
                    exists(
                        select(ChannelMember.id).where(
                            ChannelMember.user_id == User.id,
                            ChannelMember.channel_id == channel_id,
                        )
                    )
                )

            user_ids = filter.get("user_ids")
            group_ids = filter.get("group_ids")

@@ -340,7 +438,6 @@ class UsersTable:
            query = query.order_by(User.created_at.desc())

        # Count BEFORE pagination
        query = query.distinct(User.id)
        total = query.count()

        # correct pagination logic

@@ -355,7 +452,17 @@ class UsersTable:
            "total": total,
        }

    def get_users_by_user_ids(self, user_ids: list[str]) -> list[UserModel]:
    def get_users_by_group_id(self, group_id: str) -> list[UserModel]:
        with get_db() as db:
            users = (
                db.query(User)
                .join(GroupMember, User.id == GroupMember.user_id)
                .filter(GroupMember.group_id == group_id)
                .all()
            )
            return [UserModel.model_validate(user) for user in users]

    def get_users_by_user_ids(self, user_ids: list[str]) -> list[UserStatusModel]:
        with get_db() as db:
            users = db.query(User).filter(User.id.in_(user_ids)).all()
            return [UserStatusModel.model_validate(user) for user in users]

@@ -411,6 +518,21 @@ class UsersTable:
        except Exception:
            return None

    def update_user_status_by_id(
        self, id: str, form_data: UserStatus
    ) -> Optional[UserModel]:
        try:
            with get_db() as db:
                db.query(User).filter_by(id=id).update(
                    {**form_data.model_dump(exclude_none=True)}
                )
                db.commit()

                user = db.query(User).filter_by(id=id).first()
                return UserModel.model_validate(user)
        except Exception:
            return None

    def update_user_profile_image_url_by_id(
        self, id: str, profile_image_url: str
    ) -> Optional[UserModel]:

@@ -427,7 +549,7 @@ class UsersTable:
            return None

    @throttle(DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL)
    def update_user_last_active_by_id(self, id: str) -> Optional[UserModel]:
    def update_last_active_by_id(self, id: str) -> Optional[UserModel]:
        try:
            with get_db() as db:
                db.query(User).filter_by(id=id).update(
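
`update_last_active_by_id` is wrapped in `@throttle(DATABASE_USER_ACTIVE_STATUS_UPDATE_INTERVAL)` so presence pings don't turn every request into a DB write. The decorator lives in `open_webui.utils.misc`; conceptually it behaves like the per-argument rate limiter below (a minimal sketch of the idea, not the project's actual implementation):

    import time
    from functools import wraps

    def throttle(interval_seconds):
        def decorator(fn):
            last_called = {}

            @wraps(fn)
            def wrapper(*args, **kwargs):
                # One timer per argument tuple, so different users throttle independently
                key = (args, tuple(sorted(kwargs.items())))
                now = time.time()
                if now - last_called.get(key, 0) < interval_seconds:
                    return None  # suppress calls inside the window
                last_called[key] = now
                return fn(*args, **kwargs)

            return wrapper
        return decorator
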
@@ -440,16 +562,35 @@ class UsersTable:
        except Exception:
            return None

    def update_user_oauth_sub_by_id(
        self, id: str, oauth_sub: str
    def update_user_oauth_by_id(
        self, id: str, provider: str, sub: str
    ) -> Optional[UserModel]:
        """
        Update or insert an OAuth provider/sub pair into the user's oauth JSON field.
        Example resulting structure:
        {
            "google": { "sub": "123" },
            "github": { "sub": "abc" }
        }
        """
        try:
            with get_db() as db:
                db.query(User).filter_by(id=id).update({"oauth_sub": oauth_sub})
                user = db.query(User).filter_by(id=id).first()
                if not user:
                    return None

                # Load existing oauth JSON or create empty
                oauth = user.oauth or {}

                # Update or insert provider entry
                oauth[provider] = {"sub": sub}

                # Persist updated JSON
                db.query(User).filter_by(id=id).update({"oauth": oauth})
                db.commit()

                user = db.query(User).filter_by(id=id).first()
                return UserModel.model_validate(user)

        except Exception:
            return None

@@ -503,23 +644,45 @@ class UsersTable:
        except Exception:
            return False

    def update_user_api_key_by_id(self, id: str, api_key: str) -> bool:
        try:
            with get_db() as db:
                result = db.query(User).filter_by(id=id).update({"api_key": api_key})
                db.commit()
                return True if result == 1 else False
        except Exception:
            return False

    def get_user_api_key_by_id(self, id: str) -> Optional[str]:
        try:
            with get_db() as db:
                user = db.query(User).filter_by(id=id).first()
                return user.api_key
                api_key = db.query(ApiKey).filter_by(user_id=id).first()
                return api_key.key if api_key else None
        except Exception:
            return None

    def update_user_api_key_by_id(self, id: str, api_key: str) -> bool:
        try:
            with get_db() as db:
                db.query(ApiKey).filter_by(user_id=id).delete()
                db.commit()

                now = int(time.time())
                new_api_key = ApiKey(
                    id=f"key_{id}",
                    user_id=id,
                    key=api_key,
                    created_at=now,
                    updated_at=now,
                )
                db.add(new_api_key)
                db.commit()

                return True

        except Exception:
            return False

    def delete_user_api_key_by_id(self, id: str) -> bool:
        try:
            with get_db() as db:
                db.query(ApiKey).filter_by(user_id=id).delete()
                db.commit()
                return True
        except Exception:
            return False

    def get_valid_user_ids(self, user_ids: list[str]) -> list[str]:
        with get_db() as db:
            users = db.query(User).filter(User.id.in_(user_ids)).all()

@@ -533,5 +696,23 @@ class UsersTable:
        else:
            return None

    def get_active_user_count(self) -> int:
        with get_db() as db:
            # Consider user active if last_active_at within the last 3 minutes
            three_minutes_ago = int(time.time()) - 180
            count = (
                db.query(User).filter(User.last_active_at >= three_minutes_ago).count()
            )
            return count

    def is_user_active(self, user_id: str) -> bool:
        with get_db() as db:
            user = db.query(User).filter_by(id=user_id).first()
            if user and user.last_active_at:
                # Consider user active if last_active_at within the last 3 minutes
                three_minutes_ago = int(time.time()) - 180
                return user.last_active_at >= three_minutes_ago
            return False


Users = UsersTable()
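
API keys now live in their own `api_key` table; the rewritten `update_user_api_key_by_id` rotates by deleting any existing row for the user and inserting a fresh one, so a user holds at most one active key. A hypothetical rotation flow:

    Users.update_user_api_key_by_id("user-123", "sk-new-key")  # delete + insert
    assert Users.get_user_api_key_by_id("user-123") == "sk-new-key"
    Users.delete_user_api_key_by_id("user-123")                # revoke
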
@@ -6,10 +6,8 @@ from urllib.parse import quote
from langchain_core.document_loaders import BaseLoader
from langchain_core.documents import Document
from open_webui.utils.headers import include_user_info_headers
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class ExternalDocumentLoader(BaseLoader):

@@ -4,10 +4,8 @@ from typing import Iterator, List, Union

from langchain_core.document_loaders import BaseLoader
from langchain_core.documents import Document
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class ExternalWebLoader(BaseLoader):
@@ -30,11 +30,10 @@ from open_webui.retrieval.loaders.datalab_marker import DatalabMarkerLoader
from open_webui.retrieval.loaders.mineru import MinerULoader


from open_webui.env import SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL
from open_webui.env import GLOBAL_LOG_LEVEL

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

known_source_ext = [
    "go",

@@ -144,19 +143,17 @@ class DoclingLoader:
        with open(self.file_path, "rb") as f:
            headers = {}
            if self.api_key:
                headers["Authorization"] = f"Bearer {self.api_key}"

                files = {
                    "files": (
                        self.file_path,
                        f,
                        self.mime_type or "application/octet-stream",
                    )
                }
                headers["X-Api-Key"] = f"Bearer {self.api_key}"

            r = requests.post(
                f"{self.url}/v1/convert/file",
                files=files,
                files={
                    "files": (
                        self.file_path,
                        f,
                        self.mime_type or "application/octet-stream",
                    )
                },
                data={
                    "image_export_mode": "placeholder",
                    **self.params,

@@ -322,22 +319,33 @@ class Loader:
                    file_path=file_path,
                    api_endpoint=self.kwargs.get("DOCUMENT_INTELLIGENCE_ENDPOINT"),
                    api_key=self.kwargs.get("DOCUMENT_INTELLIGENCE_KEY"),
                    api_model=self.kwargs.get("DOCUMENT_INTELLIGENCE_MODEL"),
                )
            else:
                loader = AzureAIDocumentIntelligenceLoader(
                    file_path=file_path,
                    api_endpoint=self.kwargs.get("DOCUMENT_INTELLIGENCE_ENDPOINT"),
                    azure_credential=DefaultAzureCredential(),
                    api_model=self.kwargs.get("DOCUMENT_INTELLIGENCE_MODEL"),
                )
        elif self.engine == "mineru" and file_ext in [
            "pdf"
        ]:  # MinerU currently only supports PDF

            mineru_timeout = self.kwargs.get("MINERU_API_TIMEOUT", 300)
            if mineru_timeout:
                try:
                    mineru_timeout = int(mineru_timeout)
                except ValueError:
                    mineru_timeout = 300

            loader = MinerULoader(
                file_path=file_path,
                api_mode=self.kwargs.get("MINERU_API_MODE", "local"),
                api_url=self.kwargs.get("MINERU_API_URL", "http://localhost:8000"),
                api_key=self.kwargs.get("MINERU_API_KEY", ""),
                params=self.kwargs.get("MINERU_PARAMS", {}),
                timeout=mineru_timeout,
            )
        elif (
            self.engine == "mistral_ocr"
@@ -26,11 +26,13 @@ class MinerULoader:
        api_url: str = "http://localhost:8000",
        api_key: str = "",
        params: Optional[dict] = None,
        timeout: Optional[int] = 300,
    ):
        self.file_path = file_path
        self.api_mode = api_mode.lower()
        self.api_url = api_url.rstrip("/")
        self.api_key = api_key
        self.timeout = timeout

        # Parse params dict with defaults
        self.params = params or {}

@@ -101,7 +103,7 @@ class MinerULoader:
                f"{self.api_url}/file_parse",
                data=form_data,
                files=files,
                timeout=300,  # 5 minute timeout for large documents
                timeout=self.timeout,
            )
            response.raise_for_status()

@@ -300,7 +302,7 @@ class MinerULoader:
                response = requests.put(
                    upload_url,
                    data=f,
                    timeout=300,  # 5 minute timeout for large files
                    timeout=self.timeout,
                )
                response.raise_for_status()
        except FileNotFoundError:
@@ -9,11 +9,10 @@ from typing import List, Dict, Any
from contextlib import asynccontextmanager

from langchain_core.documents import Document
from open_webui.env import SRC_LOG_LEVELS, GLOBAL_LOG_LEVEL
from open_webui.env import GLOBAL_LOG_LEVEL

logging.basicConfig(stream=sys.stdout, level=GLOBAL_LOG_LEVEL)
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class MistralLoader:

@@ -4,10 +4,8 @@ from typing import Iterator, List, Literal, Union

from langchain_core.document_loaders import BaseLoader
from langchain_core.documents import Document
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class TavilyLoader(BaseLoader):

@@ -4,10 +4,8 @@ from xml.etree.ElementTree import ParseError
from typing import Any, Dict, Generator, List, Optional, Sequence, Union
from urllib.parse import parse_qs, urlparse
from langchain_core.documents import Document
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

ALLOWED_SCHEMES = {"http", "https"}
ALLOWED_NETLOCS = {
@@ -5,12 +5,10 @@ import numpy as np
from colbert.infra import ColBERTConfig
from colbert.modeling.checkpoint import Checkpoint

from open_webui.env import SRC_LOG_LEVELS

from open_webui.retrieval.models.base_reranker import BaseReranker

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class ColBERT(BaseReranker):
@@ -4,13 +4,12 @@ from typing import Optional, List, Tuple
from urllib.parse import quote


from open_webui.env import ENABLE_FORWARD_USER_INFO_HEADERS, SRC_LOG_LEVELS
from open_webui.env import ENABLE_FORWARD_USER_INFO_HEADERS
from open_webui.retrieval.models.base_reranker import BaseReranker
from open_webui.utils.headers import include_user_info_headers


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class ExternalReranker(BaseReranker):

@@ -19,10 +18,12 @@ class ExternalReranker(BaseReranker):
        api_key: str,
        url: str = "http://localhost:8080/v1/rerank",
        model: str = "reranker",
        timeout: Optional[int] = None,
    ):
        self.api_key = api_key
        self.url = url
        self.model = model
        self.timeout = timeout

    def predict(
        self, sentences: List[Tuple[str, str]], user=None

@@ -53,6 +54,7 @@ class ExternalReranker(BaseReranker):
                f"{self.url}",
                headers=headers,
                json=payload,
                timeout=self.timeout,
            )

            r.raise_for_status()
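
The reranker now threads an optional `timeout` through to `requests.post`, so a hung external service fails fast instead of blocking the retrieval pipeline. A construction sketch with hypothetical values:

    reranker = ExternalReranker(
        api_key="sk-example",                      # hypothetical
        url="http://localhost:8080/v1/rerank",
        model="reranker",
        timeout=30,  # seconds; None preserves the old blocking behavior
    )
    scores = reranker.predict([("query", "candidate passage")])
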
@@ -12,7 +12,10 @@ import re

from urllib.parse import quote
from huggingface_hub import snapshot_download
-from langchain.retrievers import ContextualCompressionRetriever, EnsembleRetriever
+from langchain_classic.retrievers import (
+    ContextualCompressionRetriever,
+    EnsembleRetriever,
+)
from langchain_community.retrievers import BM25Retriever
from langchain_core.documents import Document


@@ -37,7 +40,6 @@ from open_webui.retrieval.loaders.youtube import YoutubeLoader


from open_webui.env import (
    SRC_LOG_LEVELS,
    OFFLINE_MODE,
    ENABLE_FORWARD_USER_INFO_HEADERS,
)

@@ -48,7 +50,6 @@ from open_webui.config import (
)

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


from typing import Any

@@ -1088,23 +1089,19 @@ async def get_sources_from_items(
                or knowledge_base.user_id == user.id
                or has_access(user.id, "read", knowledge_base.access_control)
            ):

-               file_ids = knowledge_base.data.get("file_ids", [])
+               files = Knowledges.get_files_by_id(knowledge_base.id)

                documents = []
                metadatas = []
-               for file_id in file_ids:
-                   file_object = Files.get_file_by_id(file_id)
-
-                   if file_object:
-                       documents.append(file_object.data.get("content", ""))
-                       metadatas.append(
-                           {
-                               "file_id": file_id,
-                               "name": file_object.filename,
-                               "source": file_object.filename,
-                           }
-                       )
+               for file in files:
+                   documents.append(file.data.get("content", ""))
+                   metadatas.append(
+                       {
+                           "file_id": file.id,
+                           "name": file.filename,
+                           "source": file.filename,
+                       }
+                   )

                query_result = {
                    "documents": [documents],

@@ -1285,7 +1282,7 @@ class RerankCompressor(BaseDocumentCompressor):

        scores = None
        if reranking:
-           scores = self.reranking_function(query, documents)
+           scores = await asyncio.to_thread(self.reranking_function, query, documents)
        else:
            from sentence_transformers import util

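Note: the RerankCompressor change above moves a blocking reranker call off the event loop. A minimal sketch of the same pattern with illustrative names; only the asyncio.to_thread(...) call itself is taken from the diff:

import asyncio

def score(query: str, documents: list[str]) -> list[float]:
    # Stand-in for a blocking cross-encoder/reranker call.
    return [float(len(doc)) for doc in documents]

async def rerank(query: str, documents: list[str]) -> list[float]:
    # asyncio.to_thread runs the synchronous scorer in a worker thread,
    # keeping the async request handler responsive while it computes.
    return await asyncio.to_thread(score, query, documents)

# Example: asyncio.run(rerank("q", ["a", "bb"])) -> [1.0, 2.0]
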
@@ -24,10 +24,8 @@ from open_webui.config import (
    CHROMA_CLIENT_AUTH_PROVIDER,
    CHROMA_CLIENT_AUTH_CREDENTIALS,
)
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class ChromaClient(VectorDBBase):

@@ -25,10 +25,8 @@ from open_webui.config import (
    MILVUS_DISKANN_MAX_DEGREE,
    MILVUS_DISKANN_SEARCH_LIST_SIZE,
)
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class MilvusClient(VectorDBBase):

@@ -200,23 +198,24 @@ class MilvusClient(VectorDBBase):
    def query(self, collection_name: str, filter: dict, limit: int = -1):
        connections.connect(uri=MILVUS_URI, token=MILVUS_TOKEN, db_name=MILVUS_DB)

-       # Construct the filter string for querying
        collection_name = collection_name.replace("-", "_")
        if not self.has_collection(collection_name):
            log.warning(
                f"Query attempted on non-existent collection: {self.collection_prefix}_{collection_name}"
            )
            return None
-       filter_string = " && ".join(
-           [
-               f'metadata["{key}"] == {json.dumps(value)}'
-               for key, value in filter.items()
-           ]
-       )
+       filter_expressions = []
+       for key, value in filter.items():
+           if isinstance(value, str):
+               filter_expressions.append(f'metadata["{key}"] == "{value}"')
+           else:
+               filter_expressions.append(f'metadata["{key}"] == {value}')
+
+       filter_string = " && ".join(filter_expressions)

        collection = Collection(f"{self.collection_prefix}_{collection_name}")
        collection.load()
        all_results = []

        try:
            log.info(

@@ -224,24 +223,25 @@ class MilvusClient(VectorDBBase):
            )

            iterator = collection.query_iterator(
-               filter=filter_string,
+               expr=filter_string,
                output_fields=[
                    "id",
                    "data",
                    "metadata",
                ],
-               limit=limit,  # Pass the limit directly; -1 means no limit.
+               limit=limit if limit > 0 else -1,
            )

            all_results = []
            while True:
-               result = iterator.next()
-               if not result:
+               batch = iterator.next()
+               if not batch:
                    iterator.close()
                    break
-               all_results += result
+               all_results.extend(batch)

-           log.info(f"Total results from query: {len(all_results)}")
-           return self._result_to_get_result([all_results])
+           log.debug(f"Total results from query: {len(all_results)}")
+           return self._result_to_get_result([all_results] if all_results else [[]])

        except Exception as e:
            log.exception(

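Note: both Milvus query paths in this set now drain a pymilvus query iterator instead of a single query() call. A minimal sketch of that consumption pattern, assuming an existing Collection; the helper name and output fields are illustrative:

from pymilvus import Collection

def query_all(collection: Collection, filter_string: str, limit: int = -1) -> list[dict]:
    # query_iterator pages through matches instead of materializing them at
    # once; a non-positive limit is normalized to -1, meaning "no limit".
    iterator = collection.query_iterator(
        expr=filter_string,
        output_fields=["id", "data", "metadata"],
        limit=limit if limit > 0 else -1,
    )
    all_results: list[dict] = []
    while True:
        batch = iterator.next()  # an empty batch signals exhaustion
        if not batch:
            iterator.close()  # release the server-side cursor
            break
        all_results.extend(batch)
    return all_results
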
@@ -12,7 +12,6 @@ from open_webui.config import (
    MILVUS_HNSW_EFCONSTRUCTION,
    MILVUS_IVF_FLAT_NLIST,
)
from open_webui.env import SRC_LOG_LEVELS
from open_webui.retrieval.vector.main import (
    GetResult,
    SearchResult,

@@ -29,7 +28,6 @@ from pymilvus import (
)

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

RESOURCE_ID_FIELD = "resource_id"


@@ -157,7 +155,6 @@ class MilvusClient(VectorDBBase):
            for item in items
        ]
        collection.insert(entities)
        collection.flush()

    def search(
        self, collection_name: str, vectors: List[List[float]], limit: int

@@ -263,15 +260,23 @@ class MilvusClient(VectorDBBase):
            else:
                expr.append(f"metadata['{key}'] == {value}")

-       results = collection.query(
+       iterator = collection.query_iterator(
            expr=" and ".join(expr),
            output_fields=["id", "text", "metadata"],
-           limit=limit,
+           limit=limit if limit else -1,
        )

-       ids = [res["id"] for res in results]
-       documents = [res["text"] for res in results]
-       metadatas = [res["metadata"] for res in results]
+       all_results = []
+       while True:
+           batch = iterator.next()
+           if not batch:
+               iterator.close()
+               break
+           all_results.extend(batch)
+
+       ids = [res["id"] for res in all_results]
+       documents = [res["text"] for res in all_results]
+       metadatas = [res["metadata"] for res in all_results]

        return GetResult(ids=[ids], documents=[documents], metadatas=[metadatas])

@@ -55,10 +55,8 @@ from open_webui.config import (
    ORACLE_DB_POOL_MAX,
    ORACLE_DB_POOL_INCREMENT,
)
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class Oracle23aiClient(VectorDBBase):

@@ -51,7 +51,6 @@ from open_webui.config import (
    PGVECTOR_USE_HALFVEC,
)

from open_webui.env import SRC_LOG_LEVELS

VECTOR_LENGTH = PGVECTOR_INITIALIZE_MAX_VECTOR_LENGTH
USE_HALFVEC = PGVECTOR_USE_HALFVEC

@@ -61,7 +60,6 @@ VECTOR_OPCLASS = "halfvec_cosine_ops" if USE_HALFVEC else "vector_cosine_ops"
Base = declarative_base()

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def pgcrypto_encrypt(val, key):

@@ -31,7 +31,6 @@ from open_webui.config import (
    PINECONE_METRIC,
    PINECONE_CLOUD,
)
from open_webui.env import SRC_LOG_LEVELS
from open_webui.retrieval.vector.utils import process_metadata



@@ -39,7 +38,6 @@ NO_LIMIT = 10000  # Reasonable limit to avoid overwhelming the system
BATCH_SIZE = 100  # Recommended batch size for Pinecone operations

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class PineconeClient(VectorDBBase):

@@ -22,12 +22,10 @@ from open_webui.config import (
    QDRANT_TIMEOUT,
    QDRANT_HNSW_M,
)
from open_webui.env import SRC_LOG_LEVELS

NO_LIMIT = 999999999

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class QdrantClient(VectorDBBase):

@@ -13,7 +13,6 @@ from open_webui.config import (
    QDRANT_TIMEOUT,
    QDRANT_HNSW_M,
)
from open_webui.env import SRC_LOG_LEVELS
from open_webui.retrieval.vector.main import (
    GetResult,
    SearchResult,

@@ -30,7 +29,6 @@ TENANT_ID_FIELD = "tenant_id"
DEFAULT_DIMENSION = 384

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def _tenant_filter(tenant_id: str) -> models.FieldCondition:

@@ -6,13 +6,11 @@ from open_webui.retrieval.vector.main import (
    SearchResult,
)
from open_webui.config import S3_VECTOR_BUCKET_NAME, S3_VECTOR_REGION
from open_webui.env import SRC_LOG_LEVELS
from typing import List, Optional, Dict, Any, Union
import logging
import boto3

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


class S3VectorClient(VectorDBBase):

@@ -1,10 +1,8 @@
import logging
from typing import Optional
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

"""
Azure AI Search integration for Open WebUI.

@@ -4,11 +4,9 @@ from pprint import pprint
from typing import Optional
import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS
import argparse

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])
"""
Documentation: https://docs.microsoft.com/en-us/bing/search-apis/bing-web-search/overview
"""

@@ -4,20 +4,18 @@ from typing import Optional
import requests
import json
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def _parse_response(response):
-   result = {}
+   results = []
    if "data" in response:
        data = response["data"]
        if "webPages" in data:
            webPages = data["webPages"]
            if "value" in webPages:
-               result["webpage"] = [
+               results = [
                    {
                        "id": item.get("id", ""),
                        "name": item.get("name", ""),

@@ -31,7 +29,7 @@ def _parse_response(response):
                    }
                    for item in webPages["value"]
                ]
-   return result
+   return results


def search_bocha(

@@ -53,7 +51,7 @@ def search_bocha(
    response = requests.post(url, headers=headers, data=payload, timeout=5)
    response.raise_for_status()
    results = _parse_response(response.json())
-   print(results)

    if filter_list:
        results = get_filtered_results(results, filter_list)


@@ -61,5 +59,5 @@ def search_bocha(
        SearchResult(
            link=result["url"], title=result.get("name"), snippet=result.get("summary")
        )
-       for result in results.get("webpage", [])[:count]
+       for result in results[:count]
    ]

@@ -3,10 +3,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_brave(

@@ -4,10 +4,8 @@ from typing import Optional
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from ddgs import DDGS
from ddgs.exceptions import RatelimitException
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_duckduckgo(

@@ -3,11 +3,9 @@ from dataclasses import dataclass
from typing import Optional

import requests
from open_webui.env import SRC_LOG_LEVELS
from open_webui.retrieval.web.main import SearchResult

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])

EXA_API_BASE = "https://api.exa.ai"


@@ -5,14 +5,12 @@ import requests

from fastapi import Request

from open_webui.env import SRC_LOG_LEVELS

from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.utils.headers import include_user_info_headers


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_external(

@@ -2,11 +2,9 @@ import logging
from typing import Optional, List

from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_firecrawl(

@@ -3,10 +3,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_google_pse(

@@ -2,11 +2,9 @@ import logging

import requests
from open_webui.retrieval.web.main import SearchResult
from open_webui.env import SRC_LOG_LEVELS
from yarl import URL

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_jina(api_key: str, query: str, count: int) -> list[SearchResult]:

@@ -3,10 +3,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_kagi(

@@ -33,7 +33,7 @@ def get_filtered_results(results, filter_list):
        except Exception:
            pass

-       if any(is_string_allowed(hostname, filter_list) for hostname in hostnames):
+       if is_string_allowed(hostnames, filter_list):
            filtered_results.append(result)
            continue


@@ -3,10 +3,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_mojeek(

@@ -3,11 +3,9 @@ from dataclasses import dataclass
from typing import Optional

import requests
from open_webui.env import SRC_LOG_LEVELS
-from open_webui.retrieval.web.main import SearchResult
+from open_webui.retrieval.web.main import SearchResult, get_filtered_results

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_ollama_cloud(

@@ -38,6 +36,9 @@ def search_ollama_cloud(
    results = data.get("results", [])
    log.info(f"Found {len(results)} results")

+   if filter_list:
+       results = get_filtered_results(results, filter_list)
+
    return [
        SearchResult(
            link=result.get("url", ""),

@@ -3,7 +3,6 @@ from typing import Optional, Literal
import requests

from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

MODELS = Literal[
    "sonar",

@@ -16,7 +15,6 @@ SEARCH_CONTEXT_USAGE_LEVELS = Literal["low", "medium", "high"]


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_perplexity(

@@ -4,11 +4,9 @@ import requests

from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.utils.headers import include_user_info_headers
from open_webui.env import SRC_LOG_LEVELS


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_perplexity_search(

@@ -4,10 +4,8 @@ from urllib.parse import urlencode

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_searchapi(

@@ -3,10 +3,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_searxng(

@@ -27,7 +25,7 @@ def search_searxng(
        count (int): The maximum number of results to retrieve from the search.

    Keyword Args:
-       language (str): Language filter for the search results; e.g., "en-US". Defaults to an empty string.
+       language (str): Language filter for the search results; e.g., "all", "en-US", "es". Defaults to "all".
        safesearch (int): Safe search filter for safer web results; 0 = off, 1 = moderate, 2 = strict. Defaults to 1 (moderate).
        time_range (str): Time range for filtering results by date; e.g., "2023-04-05..today" or "all-time". Defaults to ''.
        categories: (Optional[list[str]]): Specific categories within which the search should be performed, defaulting to an empty string if not provided.

@@ -40,7 +38,7 @@ def search_searxng(
    """

    # Default values for optional parameters are provided as empty strings or None when not specified.
-   language = kwargs.get("language", "en-US")
+   language = kwargs.get("language", "all")
    safesearch = kwargs.get("safesearch", "1")
    time_range = kwargs.get("time_range", "")
    categories = "".join(kwargs.get("categories", []))

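Note: for context, a call exercising the documented keyword arguments might look like the sketch below. The instance URL is a placeholder, and the leading positional parameters are inferred from the docstring rather than shown in this diff:

# Illustrative only; parameter names ahead of the kwargs are assumptions.
results = search_searxng(
    "https://searx.example.com/search",  # placeholder SearXNG instance
    "open webui retrieval",
    10,
    language="all",       # new default; "en-US", "es", ... also accepted
    safesearch=1,         # 0 = off, 1 = moderate, 2 = strict
    time_range="",        # empty string = no date restriction
    categories=["general"],
)
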
@@ -4,10 +4,8 @@ from urllib.parse import urlencode

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_serpapi(

@@ -4,10 +4,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_serper(

@@ -4,10 +4,8 @@ from urllib.parse import urlencode

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_serply(

@@ -3,10 +3,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_serpstack(

@@ -4,10 +4,8 @@ from typing import Optional, List


from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_sougou(

@@ -3,10 +3,8 @@ from typing import Optional

import requests
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_tavily(

@@ -33,6 +33,7 @@ from open_webui.config import (
    PLAYWRIGHT_WS_URL,
    PLAYWRIGHT_TIMEOUT,
    WEB_LOADER_ENGINE,
+   WEB_LOADER_TIMEOUT,
    FIRECRAWL_API_BASE_URL,
    FIRECRAWL_API_KEY,
    TAVILY_API_KEY,

@@ -41,11 +42,9 @@ from open_webui.config import (
    EXTERNAL_WEB_LOADER_API_KEY,
    WEB_FETCH_FILTER_LIST,
)
from open_webui.env import SRC_LOG_LEVELS
from open_webui.utils.misc import is_string_allowed

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def resolve_hostname(hostname):

@@ -674,6 +673,20 @@ def get_web_loader(

    if WEB_LOADER_ENGINE.value == "" or WEB_LOADER_ENGINE.value == "safe_web":
        WebLoaderClass = SafeWebBaseLoader

+       request_kwargs = {}
+       if WEB_LOADER_TIMEOUT.value:
+           try:
+               timeout_value = float(WEB_LOADER_TIMEOUT.value)
+           except ValueError:
+               timeout_value = None
+
+           if timeout_value:
+               request_kwargs["timeout"] = timeout_value
+
+       if request_kwargs:
+           web_loader_args["requests_kwargs"] = request_kwargs
+
    if WEB_LOADER_ENGINE.value == "playwright":
        WebLoaderClass = SafePlaywrightURLLoader
        web_loader_args["playwright_timeout"] = PLAYWRIGHT_TIMEOUT.value

@@ -4,10 +4,8 @@ from typing import Optional
import requests
from requests.auth import HTTPDigestAuth
from open_webui.retrieval.web.main import SearchResult, get_filtered_results
from open_webui.env import SRC_LOG_LEVELS

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["RAG"])


def search_yacy(

@@ -33,6 +33,7 @@ from fastapi.responses import FileResponse
from pydantic import BaseModel


+from open_webui.utils.misc import strict_match_mime_type
from open_webui.utils.auth import get_admin_user, get_verified_user
from open_webui.utils.headers import include_user_info_headers
from open_webui.config import (

@@ -48,7 +49,6 @@ from open_webui.env import (
    ENV,
    AIOHTTP_CLIENT_SESSION_SSL,
    AIOHTTP_CLIENT_TIMEOUT,
    SRC_LOG_LEVELS,
    DEVICE_TYPE,
    ENABLE_FORWARD_USER_INFO_HEADERS,
)

@@ -63,7 +63,6 @@ AZURE_MAX_FILE_SIZE_MB = 200
AZURE_MAX_FILE_SIZE = AZURE_MAX_FILE_SIZE_MB * 1024 * 1024  # Convert MB to bytes

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["AUDIO"])

SPEECH_CACHE_DIR = CACHE_DIR / "audio" / "speech"
SPEECH_CACHE_DIR.mkdir(parents=True, exist_ok=True)

@@ -1152,20 +1151,11 @@ def transcription(
    user=Depends(get_verified_user),
):
    log.info(f"file.content_type: {file.content_type}")

    stt_supported_content_types = getattr(
        request.app.state.config, "STT_SUPPORTED_CONTENT_TYPES", []
    )

-   if not any(
-       fnmatch(file.content_type, content_type)
-       for content_type in (
-           stt_supported_content_types
-           if stt_supported_content_types
-           and any(t.strip() for t in stt_supported_content_types)
-           else ["audio/*", "video/webm"]
-       )
-   ):
+   if not strict_match_mime_type(stt_supported_content_types, file.content_type):
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail=ERROR_MESSAGES.FILE_NOT_SUPPORTED,

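Note: this hunk and the matching one in the files router replace an inline fnmatch check with strict_match_mime_type. Its implementation is not part of this diff; a plausible sketch that keeps the fallback semantics of the removed code, offered as an assumption (the real helper in open_webui.utils.misc may differ):

from fnmatch import fnmatch

def strict_match_mime_type(supported: list[str], content_type: str) -> bool:
    # Hypothetical reconstruction: fall back to the permissive defaults the
    # removed inline check used when nothing is configured.
    patterns = (
        supported
        if supported and any(t.strip() for t in supported)
        else ["audio/*", "video/webm"]
    )
    return any(fnmatch(content_type or "", pattern) for pattern in patterns)
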
@@ -6,6 +6,7 @@ import logging
from aiohttp import ClientSession
+import urllib


from open_webui.models.auths import (
    AddUserForm,
    ApiKey,

@@ -16,9 +17,13 @@ from open_webui.models.auths import (
    SigninResponse,
    SignupForm,
    UpdatePasswordForm,
-   UserResponse,
)
-from open_webui.models.users import Users, UpdateProfileForm
+from open_webui.models.users import (
+    UserProfileImageResponse,
+    Users,
+    UpdateProfileForm,
+    UserStatus,
+)
from open_webui.models.groups import Groups
from open_webui.models.oauth_sessions import OAuthSessions


@@ -32,7 +37,6 @@ from open_webui.env import (
    WEBUI_AUTH_COOKIE_SECURE,
    WEBUI_AUTH_SIGNOUT_REDIRECT_URL,
    ENABLE_INITIAL_ADMIN_SIGNUP,
    SRC_LOG_LEVELS,
)
from fastapi import APIRouter, Depends, HTTPException, Request, status
from fastapi.responses import RedirectResponse, Response, JSONResponse

@@ -60,6 +64,11 @@ from open_webui.utils.auth import (
)
from open_webui.utils.webhook import post_webhook
from open_webui.utils.access_control import get_permissions, has_permission
+from open_webui.utils.groups import apply_default_group_assignment
+
+from open_webui.utils.redis import get_redis_client
+from open_webui.utils.rate_limit import RateLimiter


from typing import Optional, List


@@ -71,19 +80,22 @@ from ldap3.utils.conv import escape_filter_chars
router = APIRouter()

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])

+signin_rate_limiter = RateLimiter(
+    redis_client=get_redis_client(), limit=5 * 3, window=60 * 3
+)

############################
# GetSessionUser
############################


-class SessionUserResponse(Token, UserResponse):
+class SessionUserResponse(Token, UserProfileImageResponse):
    expires_at: Optional[int] = None
    permissions: Optional[dict] = None


-class SessionUserInfoResponse(SessionUserResponse):
+class SessionUserInfoResponse(SessionUserResponse, UserStatus):
    bio: Optional[str] = None
    gender: Optional[str] = None
    date_of_birth: Optional[datetime.date] = None

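Note: only the RateLimiter constructor and the is_limited check used at signin are visible in this diff. A fixed-window sketch over Redis consistent with that surface, offered as an assumption rather than the project's actual code:

import time

class RateLimiter:
    def __init__(self, redis_client, limit: int, window: int):
        self.redis = redis_client
        self.limit = limit    # max attempts per window
        self.window = window  # window length in seconds

    def is_limited(self, key: str) -> bool:
        if self.redis is None:
            return False  # no Redis available, so limiting is disabled
        # Bucket keyed by the current fixed window.
        bucket = f"rate_limit:{key}:{int(time.time()) // self.window}"
        count = self.redis.incr(bucket)
        if count == 1:
            self.redis.expire(bucket, self.window)  # expire with the window
        return count > self.limit
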
@@ -140,6 +152,9 @@ async def get_session_user(
        "bio": user.bio,
        "gender": user.gender,
        "date_of_birth": user.date_of_birth,
+       "status_emoji": user.status_emoji,
+       "status_message": user.status_message,
+       "status_expires_at": user.status_expires_at,
        "permissions": user_permissions,
    }


@@ -149,7 +164,7 @@ async def get_session_user(
############################


-@router.post("/update/profile", response_model=UserResponse)
+@router.post("/update/profile", response_model=UserProfileImageResponse)
async def update_profile(
    form_data: UpdateProfileForm, session_user=Depends(get_verified_user)
):

@@ -271,13 +286,11 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm):
            f"{LDAP_ATTRIBUTE_FOR_MAIL}",
            "cn",
        ]

        if ENABLE_LDAP_GROUP_MANAGEMENT:
            search_attributes.append(f"{LDAP_ATTRIBUTE_FOR_GROUPS}")
            log.info(
                f"LDAP Group Management enabled. Adding {LDAP_ATTRIBUTE_FOR_GROUPS} to search attributes"
            )

        log.info(f"LDAP search attributes: {search_attributes}")

        search_success = connection_app.search(

@@ -285,15 +298,22 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm):
            search_filter=f"(&({LDAP_ATTRIBUTE_FOR_USERNAME}={escape_filter_chars(form_data.user.lower())}){LDAP_SEARCH_FILTERS})",
            attributes=search_attributes,
        )

        if not search_success or not connection_app.entries:
            raise HTTPException(400, detail="User not found in the LDAP server")

        entry = connection_app.entries[0]
-       username = str(entry[f"{LDAP_ATTRIBUTE_FOR_USERNAME}"]).lower()
+       entry_username = entry[f"{LDAP_ATTRIBUTE_FOR_USERNAME}"].value
        email = entry[
            f"{LDAP_ATTRIBUTE_FOR_MAIL}"
        ].value  # retrieve the Attribute value

+       username_list = []  # list of usernames from LDAP attribute
+       if isinstance(entry_username, list):
+           username_list = [str(name).lower() for name in entry_username]
+       else:
+           username_list = [str(entry_username).lower()]
+
+       # TODO: support multiple emails if LDAP returns a list
        if not email:
            raise HTTPException(400, "User does not have a valid email address.")
        elif isinstance(email, str):

@@ -303,13 +323,13 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm):
        else:
            email = str(email).lower()

-       cn = str(entry["cn"])
-       user_dn = entry.entry_dn
+       cn = str(entry["cn"])  # common name
+       user_dn = entry.entry_dn  # user distinguished name

        user_groups = []
        if ENABLE_LDAP_GROUP_MANAGEMENT and LDAP_ATTRIBUTE_FOR_GROUPS in entry:
            group_dns = entry[LDAP_ATTRIBUTE_FOR_GROUPS]
-           log.info(f"LDAP raw group DNs for user {username}: {group_dns}")
+           log.info(f"LDAP raw group DNs for user {username_list}: {group_dns}")

            if group_dns:
                log.info(f"LDAP group_dns original: {group_dns}")

@@ -360,16 +380,16 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm):
                )

                log.info(
-                   f"LDAP groups for user {username}: {user_groups} (total: {len(user_groups)})"
+                   f"LDAP groups for user {username_list}: {user_groups} (total: {len(user_groups)})"
                )
            else:
-               log.info(f"No groups found for user {username}")
+               log.info(f"No groups found for user {username_list}")
        elif ENABLE_LDAP_GROUP_MANAGEMENT:
            log.warning(
                f"LDAP Group Management enabled but {LDAP_ATTRIBUTE_FOR_GROUPS} attribute not found in user entry"
            )

-       if username == form_data.user.lower():
+       if username_list and form_data.user.lower() in username_list:
            connection_user = Connection(
                server,
                user_dn,

@@ -401,6 +421,11 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm):
                        500, detail=ERROR_MESSAGES.CREATE_USER_ERROR
                    )

+               apply_default_group_assignment(
+                   request.app.state.config.DEFAULT_GROUP_ID,
+                   user.id,
+               )
+
            except HTTPException:
                raise
            except Exception as err:

@@ -449,7 +474,6 @@ async def ldap_auth(request: Request, response: Response, form_data: LdapForm):
        ):
            if ENABLE_LDAP_GROUP_CREATION:
                Groups.create_groups_by_group_names(user.id, user_groups)

            try:
                Groups.sync_groups_by_group_names(user.id, user_groups)
                log.info(

@@ -544,6 +568,12 @@ async def signin(request: Request, response: Response, form_data: SigninForm):
                admin_email.lower(), lambda pw: verify_password(admin_password, pw)
            )
        else:
+           if signin_rate_limiter.is_limited(form_data.email.lower()):
+               raise HTTPException(
+                   status_code=status.HTTP_429_TOO_MANY_REQUESTS,
+                   detail=ERROR_MESSAGES.RATE_LIMIT_EXCEEDED,
+               )
+
            password_bytes = form_data.password.encode("utf-8")
            if len(password_bytes) > 72:
                # TODO: Implement other hashing algorithms that support longer passwords

@@ -700,9 +730,10 @@ async def signup(request: Request, response: Response, form_data: SignupForm):
            # Disable signup after the first user is created
            request.app.state.config.ENABLE_SIGNUP = False

-       default_group_id = getattr(request.app.state.config, "DEFAULT_GROUP_ID", "")
-       if default_group_id and default_group_id:
-           Groups.add_users_to_group(default_group_id, [user.id])
+       apply_default_group_assignment(
+           request.app.state.config.DEFAULT_GROUP_ID,
+           user.id,
+       )

        return {
            "token": token,

@@ -807,7 +838,9 @@ async def signout(request: Request, response: Response):


@router.post("/add", response_model=SigninResponse)
-async def add_user(form_data: AddUserForm, user=Depends(get_admin_user)):
+async def add_user(
+    request: Request, form_data: AddUserForm, user=Depends(get_admin_user)
+):
    if not validate_email_format(form_data.email.lower()):
        raise HTTPException(
            status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.INVALID_EMAIL_FORMAT

@@ -832,6 +865,11 @@ async def add_user(form_data: AddUserForm, user=Depends(get_admin_user)):
        )

+   if user:
+       apply_default_group_assignment(
+           request.app.state.config.DEFAULT_GROUP_ID,
+           user.id,
+       )

    token = create_token(data={"id": user.id})
    return {
        "token": token,

@@ -901,6 +939,7 @@ async def get_admin_config(request: Request, user=Depends(get_admin_user)):
        "JWT_EXPIRES_IN": request.app.state.config.JWT_EXPIRES_IN,
        "ENABLE_COMMUNITY_SHARING": request.app.state.config.ENABLE_COMMUNITY_SHARING,
        "ENABLE_MESSAGE_RATING": request.app.state.config.ENABLE_MESSAGE_RATING,
+       "ENABLE_FOLDERS": request.app.state.config.ENABLE_FOLDERS,
        "ENABLE_CHANNELS": request.app.state.config.ENABLE_CHANNELS,
        "ENABLE_NOTES": request.app.state.config.ENABLE_NOTES,
        "ENABLE_USER_WEBHOOKS": request.app.state.config.ENABLE_USER_WEBHOOKS,

@@ -922,6 +961,7 @@ class AdminConfig(BaseModel):
    JWT_EXPIRES_IN: str
    ENABLE_COMMUNITY_SHARING: bool
    ENABLE_MESSAGE_RATING: bool
+   ENABLE_FOLDERS: bool
    ENABLE_CHANNELS: bool
    ENABLE_NOTES: bool
    ENABLE_USER_WEBHOOKS: bool

@@ -946,6 +986,7 @@ async def update_admin_config(
        form_data.API_KEYS_ALLOWED_ENDPOINTS
    )

+   request.app.state.config.ENABLE_FOLDERS = form_data.ENABLE_FOLDERS
    request.app.state.config.ENABLE_CHANNELS = form_data.ENABLE_CHANNELS
    request.app.state.config.ENABLE_NOTES = form_data.ENABLE_NOTES


@@ -988,6 +1029,7 @@ async def update_admin_config(
        "JWT_EXPIRES_IN": request.app.state.config.JWT_EXPIRES_IN,
        "ENABLE_COMMUNITY_SHARING": request.app.state.config.ENABLE_COMMUNITY_SHARING,
        "ENABLE_MESSAGE_RATING": request.app.state.config.ENABLE_MESSAGE_RATING,
+       "ENABLE_FOLDERS": request.app.state.config.ENABLE_FOLDERS,
        "ENABLE_CHANNELS": request.app.state.config.ENABLE_CHANNELS,
        "ENABLE_NOTES": request.app.state.config.ENABLE_NOTES,
        "ENABLE_USER_WEBHOOKS": request.app.state.config.ENABLE_USER_WEBHOOKS,

@@ -1130,8 +1172,7 @@ async def generate_api_key(request: Request, user=Depends(get_current_user)):
# delete api key
@router.delete("/api_key", response_model=bool)
async def delete_api_key(user=Depends(get_current_user)):
-   success = Users.update_user_api_key_by_id(user.id, None)
-   return success
+   return Users.delete_user_api_key_by_id(user.id)


# get api key

(File diff suppressed because it is too large.)

@@ -3,10 +3,12 @@ import logging
from typing import Optional


+from open_webui.utils.misc import get_message_list
from open_webui.socket.main import get_event_emitter
from open_webui.models.chats import (
    ChatForm,
    ChatImportForm,
+   ChatUsageStatsListResponse,
    ChatsImportForm,
    ChatResponse,
    Chats,

@@ -17,7 +19,6 @@ from open_webui.models.folders import Folders

from open_webui.config import ENABLE_ADMIN_CHAT_ACCESS, ENABLE_ADMIN_EXPORT
from open_webui.constants import ERROR_MESSAGES
from open_webui.env import SRC_LOG_LEVELS
from fastapi import APIRouter, Depends, HTTPException, Request, status
from pydantic import BaseModel


@@ -26,7 +27,6 @@ from open_webui.utils.auth import get_admin_user, get_verified_user
from open_webui.utils.access_control import has_permission

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

router = APIRouter()


@@ -66,6 +66,132 @@ def get_session_user_chat_list(
    )


############################
# GetChatUsageStats
# EXPERIMENTAL: may be removed in future releases
############################


@router.get("/stats/usage", response_model=ChatUsageStatsListResponse)
def get_session_user_chat_usage_stats(
    items_per_page: Optional[int] = 50,
    page: Optional[int] = 1,
    user=Depends(get_verified_user),
):
    try:
        limit = items_per_page
        skip = (page - 1) * limit

        result = Chats.get_chats_by_user_id(user.id, skip=skip, limit=limit)

        chats = result.items
        total = result.total

        chat_stats = []
        for chat in chats:
            messages_map = chat.chat.get("history", {}).get("messages", {})
            message_id = chat.chat.get("history", {}).get("currentId")

            if messages_map and message_id:
                try:
                    history_models = {}
                    history_message_count = len(messages_map)
                    history_user_messages = []
                    history_assistant_messages = []

                    for message in messages_map.values():
                        if message.get("role", "") == "user":
                            history_user_messages.append(message)
                        elif message.get("role", "") == "assistant":
                            history_assistant_messages.append(message)
                        model = message.get("model", None)
                        if model:
                            if model not in history_models:
                                history_models[model] = 0
                            history_models[model] += 1

                    average_user_message_content_length = (
                        sum(
                            len(message.get("content", ""))
                            for message in history_user_messages
                        )
                        / len(history_user_messages)
                        if len(history_user_messages) > 0
                        else 0
                    )
                    average_assistant_message_content_length = (
                        sum(
                            len(message.get("content", ""))
                            for message in history_assistant_messages
                        )
                        / len(history_assistant_messages)
                        if len(history_assistant_messages) > 0
                        else 0
                    )

                    response_times = []
                    for message in history_assistant_messages:
                        user_message_id = message.get("parentId", None)
                        if user_message_id and user_message_id in messages_map:
                            user_message = messages_map[user_message_id]
                            response_time = message.get(
                                "timestamp", 0
                            ) - user_message.get("timestamp", 0)

                            response_times.append(response_time)

                    average_response_time = (
                        sum(response_times) / len(response_times)
                        if len(response_times) > 0
                        else 0
                    )

                    message_list = get_message_list(messages_map, message_id)
                    message_count = len(message_list)

                    models = {}
                    for message in reversed(message_list):
                        if message.get("role") == "assistant":
                            model = message.get("model", None)
                            if model:
                                if model not in models:
                                    models[model] = 0
                                models[model] += 1

                    annotation = message.get("annotation", {})

                    chat_stats.append(
                        {
                            "id": chat.id,
                            "models": models,
                            "message_count": message_count,
                            "history_models": history_models,
                            "history_message_count": history_message_count,
                            "history_user_message_count": len(history_user_messages),
                            "history_assistant_message_count": len(
                                history_assistant_messages
                            ),
                            "average_response_time": average_response_time,
                            "average_user_message_content_length": average_user_message_content_length,
                            "average_assistant_message_content_length": average_assistant_message_content_length,
                            "tags": chat.meta.get("tags", []),
                            "last_message_at": message_list[-1].get("timestamp", None),
                            "updated_at": chat.updated_at,
                            "created_at": chat.created_at,
                        }
                    )
                except Exception as e:
                    pass

        return ChatUsageStatsListResponse(items=chat_stats, total=total)

    except Exception as e:
        log.exception(e)
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST, detail=ERROR_MESSAGES.DEFAULT()
        )


############################
# DeleteAllChats
############################

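Note: for orientation, the stats endpoint above assembles one dict per chat; an illustrative payload shape follows. All values are made up, and the field set mirrors the dict built in the handler:

# Example ChatUsageStatsListResponse body (hypothetical values).
{
    "items": [
        {
            "id": "chat-abc123",  # placeholder chat id
            "models": {"llama3:8b": 2},
            "message_count": 4,
            "history_models": {"llama3:8b": 3},
            "history_message_count": 7,
            "history_user_message_count": 3,
            "history_assistant_message_count": 3,
            "average_response_time": 2.4,  # timestamp delta between turns
            "average_user_message_content_length": 52.0,
            "average_assistant_message_content_length": 310.0,
            "tags": ["demo"],
            "last_message_at": 1733800000,
            "updated_at": 1733800000,
            "created_at": 1733790000,
        }
    ],
    "total": 1,
}
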
@@ -18,7 +18,6 @@ from open_webui.utils.tools import (
from open_webui.utils.mcp.client import MCPClient
from open_webui.models.oauth_sessions import OAuthSessions

from open_webui.env import SRC_LOG_LEVELS

from open_webui.utils.oauth import (
    get_discovery_urls,

@@ -32,7 +31,6 @@ from mcp.shared.auth import OAuthMetadata
router = APIRouter()

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


############################

@@ -4,6 +4,7 @@ from pydantic import BaseModel

from open_webui.models.users import Users, UserModel
from open_webui.models.feedbacks import (
+   FeedbackIdResponse,
    FeedbackModel,
    FeedbackResponse,
    FeedbackForm,

@@ -64,6 +65,12 @@ async def get_all_feedbacks(user=Depends(get_admin_user)):
    return feedbacks


+@router.get("/feedbacks/all/ids", response_model=list[FeedbackIdResponse])
+async def get_all_feedback_ids(user=Depends(get_admin_user)):
+    feedbacks = Feedbacks.get_all_feedbacks()
+    return feedbacks
+
+
@router.delete("/feedbacks/all")
async def delete_all_feedbacks(user=Depends(get_admin_user)):
    success = Feedbacks.delete_all_feedbacks()

@@ -71,7 +78,7 @@ async def delete_all_feedbacks(user=Depends(get_admin_user)):


@router.get("/feedbacks/all/export", response_model=list[FeedbackModel])
-async def get_all_feedbacks(user=Depends(get_admin_user)):
+async def export_all_feedbacks(user=Depends(get_admin_user)):
    feedbacks = Feedbacks.get_all_feedbacks()
    return feedbacks


@@ -22,10 +22,11 @@ from fastapi import (
)

from fastapi.responses import FileResponse, StreamingResponse

from open_webui.constants import ERROR_MESSAGES
from open_webui.env import SRC_LOG_LEVELS
from open_webui.retrieval.vector.factory import VECTOR_DB_CLIENT

+from open_webui.models.channels import Channels
from open_webui.models.users import Users
from open_webui.models.files import (
    FileForm,

@@ -33,17 +34,23 @@ from open_webui.models.files import (
    FileModelResponse,
    Files,
)
+from open_webui.models.chats import Chats
from open_webui.models.knowledge import Knowledges
+from open_webui.models.groups import Groups


from open_webui.routers.knowledge import get_knowledge, get_knowledge_list
from open_webui.routers.retrieval import ProcessFileForm, process_file
from open_webui.routers.audio import transcribe

from open_webui.storage.provider import Storage


from open_webui.utils.auth import get_admin_user, get_verified_user
from open_webui.utils.access_control import has_access
+from open_webui.utils.misc import strict_match_mime_type
from pydantic import BaseModel

log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])

router = APIRouter()


@@ -53,31 +60,48 @@ router = APIRouter()
############################


+# TODO: Optimize this function to use the knowledge_file table for faster lookups.
def has_access_to_file(
    file_id: Optional[str], access_type: str, user=Depends(get_verified_user)
) -> bool:
    file = Files.get_file_by_id(file_id)
    log.debug(f"Checking if user has {access_type} access to file")

    if not file:
        raise HTTPException(
            status_code=status.HTTP_404_NOT_FOUND,
            detail=ERROR_MESSAGES.NOT_FOUND,
        )

-   has_access = False
-   knowledge_base_id = file.meta.get("collection_name") if file.meta else None
+   # Check if the file is associated with any knowledge bases the user has access to
+   knowledge_bases = Knowledges.get_knowledges_by_file_id(file_id)
+   user_group_ids = {group.id for group in Groups.get_groups_by_member_id(user.id)}
+   for knowledge_base in knowledge_bases:
+       if knowledge_base.user_id == user.id or has_access(
+           user.id, access_type, knowledge_base.access_control, user_group_ids
+       ):
+           return True

+   knowledge_base_id = file.meta.get("collection_name") if file.meta else None
    if knowledge_base_id:
        knowledge_bases = Knowledges.get_knowledge_bases_by_user_id(
            user.id, access_type
        )
        for knowledge_base in knowledge_bases:
            if knowledge_base.id == knowledge_base_id:
-               has_access = True
-               break
+               return True

-   return has_access
+   # Check if the file is associated with any channels the user has access to
+   channels = Channels.get_channels_by_file_id_and_user_id(file_id, user.id)
+   if access_type == "read" and channels:
+       return True
+
+   # Check if the file is associated with any chats the user has access to
+   # TODO: Granular access control for chats
+   chats = Chats.get_shared_chats_by_file_id(file_id)
+   if chats:
+       return True
+
+   return False


############################

@@ -92,15 +116,7 @@ def process_uploaded_file(request, file, file_path, file_item, file_metadata, us
        request.app.state.config, "STT_SUPPORTED_CONTENT_TYPES", []
    )

-   if any(
-       fnmatch(file.content_type, content_type)
-       for content_type in (
-           stt_supported_content_types
-           if stt_supported_content_types
-           and any(t.strip() for t in stt_supported_content_types)
-           else ["audio/*", "video/webm"]
-       )
-   ):
+   if strict_match_mime_type(stt_supported_content_types, file.content_type):
        file_path = Storage.get_file(file_path)
        result = transcribe(request, file_path, file_metadata, user)


@@ -124,6 +140,7 @@ def process_uploaded_file(request, file, file_path, file_item, file_metadata, us
                f"File type {file.content_type} is not provided, but trying to process anyway"
            )
            process_file(request, ProcessFileForm(file_id=file_item.id), user=user)

    except Exception as e:
        log.error(f"Error processing file: {file_item.id}")
        Files.update_file_data_by_id(

@@ -165,7 +182,7 @@ def upload_file_handler(
    user=Depends(get_verified_user),
    background_tasks: Optional[BackgroundTasks] = None,
):
-   log.info(f"file.content_type: {file.content_type}")
+   log.info(f"file.content_type: {file.content_type} {process}")

    if isinstance(metadata, str):
        try:

@@ -233,6 +250,13 @@ def upload_file_handler(
        ),
    )

+   if "channel_id" in file_metadata:
+       channel = Channels.get_channel_by_id_and_user_id(
+           file_metadata["channel_id"], user.id
+       )
+       if channel:
+           Channels.add_file_to_channel_by_id(channel.id, file_item.id, user.id)
+
    if process:
        if background_tasks and process_in_background:
            background_tasks.add_task(

@@ -21,7 +21,6 @@ from open_webui.models.knowledge import Knowledges


from open_webui.config import UPLOAD_DIR
from open_webui.env import SRC_LOG_LEVELS
from open_webui.constants import ERROR_MESSAGES



@@ -34,7 +33,6 @@ from open_webui.utils.access_control import has_permission


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MODELS"])


router = APIRouter()

@@ -46,7 +44,23 @@ router = APIRouter()


@router.get("/", response_model=list[FolderNameIdResponse])
-async def get_folders(user=Depends(get_verified_user)):
+async def get_folders(request: Request, user=Depends(get_verified_user)):
+    if request.app.state.config.ENABLE_FOLDERS is False:
+        raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
+    if user.role != "admin" and not has_permission(
+        user.id,
+        "features.folders",
+        request.app.state.config.USER_PERMISSIONS,
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_403_FORBIDDEN,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
    folders = Folders.get_folders_by_user_id(user.id)

    # Verify folder data integrity

@@ -23,12 +23,10 @@ from open_webui.config import CACHE_DIR
from open_webui.constants import ERROR_MESSAGES
from fastapi import APIRouter, Depends, HTTPException, Request, status
from open_webui.utils.auth import get_admin_user, get_verified_user
from open_webui.env import SRC_LOG_LEVELS
from pydantic import BaseModel, HttpUrl


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


router = APIRouter()

@@ -3,7 +3,7 @@ from pathlib import Path
from typing import Optional
import logging

-from open_webui.models.users import Users
+from open_webui.models.users import Users, UserInfoResponse
from open_webui.models.groups import (
    Groups,
    GroupForm,

@@ -17,11 +17,9 @@ from open_webui.constants import ERROR_MESSAGES
from fastapi import APIRouter, Depends, HTTPException, Request, status

from open_webui.utils.auth import get_admin_user, get_verified_user
from open_webui.env import SRC_LOG_LEVELS


log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])

router = APIRouter()


@@ -32,31 +30,17 @@ router = APIRouter()

@router.get("/", response_model=list[GroupResponse])
async def get_groups(share: Optional[bool] = None, user=Depends(get_verified_user)):
-   if user.role == "admin":
-       groups = Groups.get_groups()
-   else:
-       groups = Groups.get_groups_by_member_id(user.id)

-   group_list = []
+   filter = {}
+   if user.role != "admin":
+       filter["member_id"] = user.id

-   for group in groups:
-       if share is not None:
-           # Check if the group has data and a config with share key
-           if (
-               group.data
-               and "share" in group.data.get("config", {})
-               and group.data["config"]["share"] != share
-           ):
-               continue
+   if share is not None:
+       filter["share"] = share

-       group_list.append(
-           GroupResponse(
-               **group.model_dump(),
-               member_count=Groups.get_group_member_count_by_id(group.id),
-           )
-       )
+   groups = Groups.get_groups(filter=filter)

-   return group_list
+   return groups


############################

@@ -106,6 +90,50 @@ async def get_group_by_id(id: str, user=Depends(get_admin_user)):
        )


+############################
+# ExportGroupById
+############################
+
+
+class GroupExportResponse(GroupResponse):
+    user_ids: list[str] = []
+    pass
+
+
+@router.get("/id/{id}/export", response_model=Optional[GroupExportResponse])
+async def export_group_by_id(id: str, user=Depends(get_admin_user)):
+    group = Groups.get_group_by_id(id)
+    if group:
+        return GroupExportResponse(
+            **group.model_dump(),
+            member_count=Groups.get_group_member_count_by_id(group.id),
+            user_ids=Groups.get_group_user_ids_by_id(group.id),
+        )
+    else:
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=ERROR_MESSAGES.NOT_FOUND,
+        )
+
+
+############################
+# GetUsersInGroupById
+############################
+
+
+@router.post("/id/{id}/users", response_model=list[UserInfoResponse])
+async def get_users_in_group(id: str, user=Depends(get_admin_user)):
+    try:
+        users = Users.get_users_by_group_id(id)
+        return users
+    except Exception as e:
+        log.exception(f"Error adding users to group {id}: {e}")
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.DEFAULT(e),
+        )
+
+
############################
# UpdateGroupById
############################

@ -16,7 +16,9 @@ from fastapi.responses import FileResponse
|
|||
|
||||
from open_webui.config import CACHE_DIR
|
||||
from open_webui.constants import ERROR_MESSAGES
|
||||
from open_webui.env import ENABLE_FORWARD_USER_INFO_HEADERS, SRC_LOG_LEVELS
|
||||
from open_webui.env import ENABLE_FORWARD_USER_INFO_HEADERS
|
||||
|
||||
from open_webui.models.chats import Chats
|
||||
from open_webui.routers.files import upload_file_handler, get_file_content_by_id
|
||||
from open_webui.utils.auth import get_admin_user, get_verified_user
|
||||
from open_webui.utils.headers import include_user_info_headers
|
||||
|
|
@ -31,7 +33,6 @@ from open_webui.utils.images.comfyui import (
|
|||
from pydantic import BaseModel
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
log.setLevel(SRC_LOG_LEVELS["IMAGES"])
|
||||
|
||||
IMAGE_CACHE_DIR = CACHE_DIR / "image" / "generations"
|
||||
IMAGE_CACHE_DIR.mkdir(parents=True, exist_ok=True)
|
||||
|
|
@ -196,12 +197,12 @@ async def update_config(
|
|||
set_image_model(request, form_data.IMAGE_GENERATION_MODEL)
|
||||
if (
|
||||
form_data.IMAGE_SIZE == "auto"
|
||||
and form_data.IMAGE_GENERATION_MODEL != "gpt-image-1"
|
||||
and not form_data.IMAGE_GENERATION_MODEL.startswith("gpt-image")
|
||||
):
|
||||
raise HTTPException(
|
||||
status_code=400,
|
||||
detail=ERROR_MESSAGES.INCORRECT_FORMAT(
|
||||
" (auto is only allowed with gpt-image-1)."
|
||||
" (auto is only allowed with gpt-image models)."
|
||||
),
|
||||
)
|
||||
|
||||
|
|
@ -380,6 +381,7 @@ def get_models(request: Request, user=Depends(get_verified_user)):
|
|||
{"id": "dall-e-2", "name": "DALL·E 2"},
|
||||
{"id": "dall-e-3", "name": "DALL·E 3"},
|
||||
{"id": "gpt-image-1", "name": "GPT-IMAGE 1"},
|
||||
{"id": "gpt-image-1.5", "name": "GPT-IMAGE 1.5"},
|
||||
]
|
||||
elif request.app.state.config.IMAGE_GENERATION_ENGINE == "gemini":
|
||||
return [
|
||||
|
|
@ -510,15 +512,36 @@ def upload_image(request, image_data, content_type, metadata, user):
|
|||
process=False,
|
||||
user=user,
|
||||
)
|
||||
|
||||
if file_item and file_item.id:
|
||||
# If chat_id and message_id are provided in metadata, link the file to the chat message
|
||||
chat_id = metadata.get("chat_id")
|
||||
message_id = metadata.get("message_id")
|
||||
|
||||
if chat_id and message_id:
|
||||
Chats.insert_chat_files(
|
||||
chat_id=chat_id,
|
||||
message_id=message_id,
|
||||
file_ids=[file_item.id],
|
||||
user_id=user.id,
|
||||
)
|
||||
|
||||
url = request.app.url_path_for("get_file_content_by_id", id=file_item.id)
|
||||
return url
|
||||
return file_item, url
|
||||
|
||||
|
||||
@router.post("/generations")
|
||||
async def generate_images(
|
||||
request: Request, form_data: CreateImageForm, user=Depends(get_verified_user)
|
||||
):
|
||||
return await image_generations(request, form_data, user=user)
|
||||
|
||||
|
||||
async def image_generations(
|
||||
request: Request,
|
||||
form_data: CreateImageForm,
|
||||
user=Depends(get_verified_user),
|
||||
metadata: Optional[dict] = None,
|
||||
user=None,
|
||||
):
|
||||
# if IMAGE_SIZE = 'auto', default WidthxHeight to the 512x512 default
|
||||
# This is only relevant when the user has set IMAGE_SIZE to 'auto' with an
|
||||
|
|
@ -535,6 +558,9 @@ async def image_generations(
|
|||
size = form_data.size
|
||||
|
||||
width, height = tuple(map(int, size.split("x")))
|
||||
|
||||
metadata = metadata or {}
|
||||
|
||||
model = get_image_model(request)
|
||||
|
||||
r = None
|
||||
|
|
@ -564,7 +590,9 @@ async def image_generations(
|
|||
),
|
||||
**(
|
||||
{}
|
||||
if "gpt-image-1" in request.app.state.config.IMAGE_GENERATION_MODEL
|
||||
if request.app.state.config.IMAGE_GENERATION_MODEL.startswith(
|
||||
"gpt-image"
|
||||
)
|
||||
else {"response_format": "b64_json"}
|
||||
),
|
||||
**(
|
||||
|
|
@@ -593,7 +621,9 @@ async def image_generations(
                 else:
                     image_data, content_type = get_image_data(image["b64_json"])

-                url = upload_image(request, image_data, content_type, data, user)
+                _, url = upload_image(
+                    request, image_data, content_type, {**data, **metadata}, user
+                )
                 images.append({"url": url})
             return images
@@ -643,7 +673,9 @@ async def image_generations(
                     image_data, content_type = get_image_data(
                         image["bytesBase64Encoded"]
                     )
-                    url = upload_image(request, image_data, content_type, data, user)
+                    _, url = upload_image(
+                        request, image_data, content_type, {**data, **metadata}, user
+                    )
                     images.append({"url": url})
             elif model.endswith(":generateContent"):
                 for image in res["candidates"]:
@@ -652,8 +684,12 @@ async def image_generations(
                         image_data, content_type = get_image_data(
                             part["inlineData"]["data"]
                         )
-                        url = upload_image(
-                            request, image_data, content_type, data, user
+                        _, url = upload_image(
+                            request,
+                            image_data,
+                            content_type,
+                            {**data, **metadata},
+                            user,
                         )
                         images.append({"url": url})
@@ -703,11 +739,11 @@ async def image_generations(
             }

             image_data, content_type = get_image_data(image["url"], headers)
-            url = upload_image(
+            _, url = upload_image(
                 request,
                 image_data,
                 content_type,
-                form_data.model_dump(exclude_none=True),
+                {**form_data.model_dump(exclude_none=True), **metadata},
                 user,
             )
             images.append({"url": url})
@@ -750,11 +786,11 @@ async def image_generations(

         for image in res["images"]:
             image_data, content_type = get_image_data(image)
-            url = upload_image(
+            _, url = upload_image(
                 request,
                 image_data,
                 content_type,
-                {**data, "info": res["info"]},
+                {**data, "info": res["info"], **metadata},
                 user,
             )
             images.append({"url": url})
@@ -781,10 +817,13 @@ class EditImageForm(BaseModel):
 async def image_edits(
     request: Request,
     form_data: EditImageForm,
+    metadata: Optional[dict] = None,
     user=Depends(get_verified_user),
 ):
     size = None
     width, height = None, None
+    metadata = metadata or {}

     if (
         request.app.state.config.IMAGE_EDIT_SIZE
         and "x" in request.app.state.config.IMAGE_EDIT_SIZE
@@ -867,7 +906,7 @@ async def image_edits(
         **({"size": size} if size else {}),
         **(
             {}
-            if "gpt-image-1" in request.app.state.config.IMAGE_EDIT_MODEL
+            if request.app.state.config.IMAGE_EDIT_MODEL.startswith("gpt-image")
             else {"response_format": "b64_json"}
         ),
     }
@@ -902,7 +941,9 @@ async def image_edits(
             else:
                 image_data, content_type = get_image_data(image["b64_json"])

-            url = upload_image(request, image_data, content_type, data, user)
+            _, url = upload_image(
+                request, image_data, content_type, {**data, **metadata}, user
+            )
             images.append({"url": url})
         return images
@@ -955,8 +996,12 @@ async def image_edits(
                     image_data, content_type = get_image_data(
                         part["inlineData"]["data"]
                     )
-                    url = upload_image(
-                        request, image_data, content_type, data, user
+                    _, url = upload_image(
+                        request,
+                        image_data,
+                        content_type,
+                        {**data, **metadata},
+                        user,
                     )
                     images.append({"url": url})
@@ -1033,11 +1078,11 @@ async def image_edits(
             }

             image_data, content_type = get_image_data(image_url, headers)
-            url = upload_image(
+            _, url = upload_image(
                 request,
                 image_data,
                 content_type,
-                form_data.model_dump(exclude_none=True),
+                {**form_data.model_dump(exclude_none=True), **metadata},
                 user,
             )
             images.append({"url": url})
@@ -4,7 +4,9 @@ from fastapi import APIRouter, Depends, HTTPException, status, Request, Query
 from fastapi.concurrency import run_in_threadpool
 import logging

+from open_webui.models.groups import Groups
 from open_webui.models.knowledge import (
+    KnowledgeFileListResponse,
     Knowledges,
     KnowledgeForm,
     KnowledgeResponse,
@@ -25,13 +27,11 @@ from open_webui.utils.auth import get_verified_user
 from open_webui.utils.access_control import has_access, has_permission


-from open_webui.env import SRC_LOG_LEVELS
 from open_webui.config import BYPASS_ADMIN_ACCESS_CONTROL
 from open_webui.models.models import Models, ModelForm


 log = logging.getLogger(__name__)
-log.setLevel(SRC_LOG_LEVELS["MODELS"])

 router = APIRouter()
@@ -39,100 +39,115 @@ router = APIRouter()
 # getKnowledgeBases
 ############################

+PAGE_ITEM_COUNT = 30

-@router.get("/", response_model=list[KnowledgeUserResponse])
-async def get_knowledge(user=Depends(get_verified_user)):
-    knowledge_bases = []
-
-    if user.role == "admin" and BYPASS_ADMIN_ACCESS_CONTROL:
-        knowledge_bases = Knowledges.get_knowledge_bases()
-    else:
-        knowledge_bases = Knowledges.get_knowledge_bases_by_user_id(user.id, "read")
+class KnowledgeAccessResponse(KnowledgeUserResponse):
+    write_access: Optional[bool] = False

-    # Get files for each knowledge base
-    knowledge_with_files = []
-    for knowledge_base in knowledge_bases:
-        files = []
-        if knowledge_base.data:
-            files = Files.get_file_metadatas_by_ids(
-                knowledge_base.data.get("file_ids", [])
-            )
-
-            # Check if all files exist
-            if len(files) != len(knowledge_base.data.get("file_ids", [])):
-                missing_files = list(
-                    set(knowledge_base.data.get("file_ids", []))
-                    - set([file.id for file in files])
-                )
-                if missing_files:
-                    data = knowledge_base.data or {}
-                    file_ids = data.get("file_ids", [])
+class KnowledgeAccessListResponse(BaseModel):
+    items: list[KnowledgeAccessResponse]
+    total: int

-                    for missing_file in missing_files:
-                        file_ids.remove(missing_file)
-
-                    data["file_ids"] = file_ids
-                    Knowledges.update_knowledge_data_by_id(
-                        id=knowledge_base.id, data=data
-                    )
+@router.get("/", response_model=KnowledgeAccessListResponse)
+async def get_knowledge_bases(page: Optional[int] = 1, user=Depends(get_verified_user)):
+    page = max(page, 1)
+    limit = PAGE_ITEM_COUNT
+    skip = (page - 1) * limit

-                    files = Files.get_file_metadatas_by_ids(file_ids)
+    filter = {}
+    if not user.role == "admin" or not BYPASS_ADMIN_ACCESS_CONTROL:
+        groups = Groups.get_groups_by_member_id(user.id)
+        if groups:
+            filter["group_ids"] = [group.id for group in groups]

-        knowledge_with_files.append(
-            KnowledgeUserResponse(
+        filter["user_id"] = user.id

+    result = Knowledges.search_knowledge_bases(
+        user.id, filter=filter, skip=skip, limit=limit
+    )

+    return KnowledgeAccessListResponse(
+        items=[
+            KnowledgeAccessResponse(
                 **knowledge_base.model_dump(),
-                files=files,
+                write_access=(
+                    user.id == knowledge_base.user_id
+                    or has_access(user.id, "write", knowledge_base.access_control)
+                ),
             )
-        )
-
-    return knowledge_with_files
+            for knowledge_base in result.items
+        ],
+        total=result.total,
+    )


-@router.get("/list", response_model=list[KnowledgeUserResponse])
-async def get_knowledge_list(user=Depends(get_verified_user)):
-    knowledge_bases = []
+@router.get("/search", response_model=KnowledgeAccessListResponse)
+async def search_knowledge_bases(
+    query: Optional[str] = None,
+    view_option: Optional[str] = None,
+    page: Optional[int] = 1,
+    user=Depends(get_verified_user),
+):
+    page = max(page, 1)
+    limit = PAGE_ITEM_COUNT
+    skip = (page - 1) * limit

-    if user.role == "admin" and BYPASS_ADMIN_ACCESS_CONTROL:
-        knowledge_bases = Knowledges.get_knowledge_bases()
-    else:
-        knowledge_bases = Knowledges.get_knowledge_bases_by_user_id(user.id, "write")
+    filter = {}
+    if query:
+        filter["query"] = query
+    if view_option:
+        filter["view_option"] = view_option

-    # Get files for each knowledge base
-    knowledge_with_files = []
-    for knowledge_base in knowledge_bases:
-        files = []
-        if knowledge_base.data:
-            files = Files.get_file_metadatas_by_ids(
-                knowledge_base.data.get("file_ids", [])
-            )
+    if not user.role == "admin" or not BYPASS_ADMIN_ACCESS_CONTROL:
+        groups = Groups.get_groups_by_member_id(user.id)
+        if groups:
+            filter["group_ids"] = [group.id for group in groups]

-            # Check if all files exist
-            if len(files) != len(knowledge_base.data.get("file_ids", [])):
-                missing_files = list(
-                    set(knowledge_base.data.get("file_ids", []))
-                    - set([file.id for file in files])
-                )
-                if missing_files:
-                    data = knowledge_base.data or {}
-                    file_ids = data.get("file_ids", [])
+        filter["user_id"] = user.id

-                    for missing_file in missing_files:
-                        file_ids.remove(missing_file)
+    result = Knowledges.search_knowledge_bases(
+        user.id, filter=filter, skip=skip, limit=limit
+    )

-                    data["file_ids"] = file_ids
-                    Knowledges.update_knowledge_data_by_id(
-                        id=knowledge_base.id, data=data
-                    )
-
-                    files = Files.get_file_metadatas_by_ids(file_ids)
-
-        knowledge_with_files.append(
-            KnowledgeUserResponse(
+    return KnowledgeAccessListResponse(
+        items=[
+            KnowledgeAccessResponse(
                 **knowledge_base.model_dump(),
-                files=files,
+                write_access=(
+                    user.id == knowledge_base.user_id
+                    or has_access(user.id, "write", knowledge_base.access_control)
+                ),
             )
-        )
-    return knowledge_with_files
+            for knowledge_base in result.items
+        ],
+        total=result.total,
+    )
+
+
+@router.get("/search/files", response_model=KnowledgeFileListResponse)
+async def search_knowledge_files(
+    query: Optional[str] = None,
+    page: Optional[int] = 1,
+    user=Depends(get_verified_user),
+):
+    page = max(page, 1)
+    limit = PAGE_ITEM_COUNT
+    skip = (page - 1) * limit
+
+    filter = {}
+    if query:
+        filter["query"] = query
+
+    groups = Groups.get_groups_by_member_id(user.id)
+    if groups:
+        filter["group_ids"] = [group.id for group in groups]
+
+    filter["user_id"] = user.id
+
+    return Knowledges.search_knowledge_files(filter=filter, skip=skip, limit=limit)


 ############################
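The hunk above replaces the eager "load every base, then fetch and repair its file list" loop with paginated listing (`PAGE_ITEM_COUNT = 30` per page) and a `/search` route that pushes filtering into `Knowledges.search_knowledge_bases`. A hedged sketch of calling the new routes over HTTP; the base URL and token are placeholders, and the `/api/v1/knowledge` prefix is assumed from the router's usual mounting:

```python
import requests

BASE = "http://localhost:8080/api/v1/knowledge"  # placeholder instance
HEADERS = {"Authorization": "Bearer YOUR_API_KEY"}  # placeholder token

# Page 1 of the paginated listing: {"items": [...], "total": N}
page1 = requests.get(f"{BASE}/", params={"page": 1}, headers=HEADERS)
page1.raise_for_status()

# Server-side search; non-admins are scoped to their own bases plus
# bases shared with their groups via the user_id/group_ids filter.
hits = requests.get(
    f"{BASE}/search",
    params={"query": "onboarding", "page": 1},
    headers=HEADERS,
)
hits.raise_for_status()
print(page1.json()["total"], len(hits.json()["items"]))
```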
@@ -192,26 +207,9 @@ async def reindex_knowledge_files(request: Request, user=Depends(get_verified_user)):
     log.info(f"Starting reindexing for {len(knowledge_bases)} knowledge bases")

-    deleted_knowledge_bases = []
-
     for knowledge_base in knowledge_bases:
-        # -- Robust error handling for missing or invalid data
-        if not knowledge_base.data or not isinstance(knowledge_base.data, dict):
-            log.warning(
-                f"Knowledge base {knowledge_base.id} has no data or invalid data ({knowledge_base.data!r}). Deleting."
-            )
-            try:
-                Knowledges.delete_knowledge_by_id(id=knowledge_base.id)
-                deleted_knowledge_bases.append(knowledge_base.id)
-            except Exception as e:
-                log.error(
-                    f"Failed to delete invalid knowledge base {knowledge_base.id}: {e}"
-                )
-            continue
-
-        try:
-            file_ids = knowledge_base.data.get("file_ids", [])
-            files = Files.get_files_by_ids(file_ids)
+        files = Knowledges.get_files_by_id(knowledge_base.id)
+        try:
             if VECTOR_DB_CLIENT.has_collection(collection_name=knowledge_base.id):
                 VECTOR_DB_CLIENT.delete_collection(
@@ -251,9 +249,7 @@ async def reindex_knowledge_files(request: Request, user=Depends(get_verified_user)):
         for failed in failed_files:
             log.warning(f"File ID: {failed['file_id']}, Error: {failed['error']}")

-    log.info(
-        f"Reindexing completed. Deleted {len(deleted_knowledge_bases)} invalid knowledge bases: {deleted_knowledge_bases}"
-    )
+    log.info(f"Reindexing completed.")
     return True
@@ -263,7 +259,8 @@ async def reindex_knowledge_files(request: Request, user=Depends(get_verified_user)):


 class KnowledgeFilesResponse(KnowledgeResponse):
-    files: list[FileMetadataResponse]
+    files: Optional[list[FileMetadataResponse]] = None
+    write_access: Optional[bool] = False


 @router.get("/{id}", response_model=Optional[KnowledgeFilesResponse])
@@ -271,19 +268,18 @@ async def get_knowledge_by_id(id: str, user=Depends(get_verified_user)):
     knowledge = Knowledges.get_knowledge_by_id(id=id)

     if knowledge:
         if (
             user.role == "admin"
             or knowledge.user_id == user.id
             or has_access(user.id, "read", knowledge.access_control)
         ):
             file_ids = knowledge.data.get("file_ids", []) if knowledge.data else []
             files = Files.get_file_metadatas_by_ids(file_ids)

             return KnowledgeFilesResponse(
                 **knowledge.model_dump(),
                 files=files,
+                write_access=(
+                    user.id == knowledge.user_id
+                    or has_access(user.id, "write", knowledge.access_control)
+                ),
             )
     else:
         raise HTTPException(
@@ -335,12 +331,9 @@ async def update_knowledge_by_id(
     knowledge = Knowledges.update_knowledge_by_id(id=id, form_data=form_data)
     if knowledge:
-        file_ids = knowledge.data.get("file_ids", []) if knowledge.data else []
-        files = Files.get_file_metadatas_by_ids(file_ids)
-
         return KnowledgeFilesResponse(
             **knowledge.model_dump(),
-            files=files,
+            files=Knowledges.get_file_metadatas_by_id(knowledge.id),
         )
     else:
         raise HTTPException(
@@ -349,6 +342,59 @@ async def update_knowledge_by_id(
         )


+############################
+# GetKnowledgeFilesById
+############################
+
+
+@router.get("/{id}/files", response_model=KnowledgeFileListResponse)
+async def get_knowledge_files_by_id(
+    id: str,
+    query: Optional[str] = None,
+    view_option: Optional[str] = None,
+    order_by: Optional[str] = None,
+    direction: Optional[str] = None,
+    page: Optional[int] = 1,
+    user=Depends(get_verified_user),
+):
+    knowledge = Knowledges.get_knowledge_by_id(id=id)
+    if not knowledge:
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.NOT_FOUND,
+        )
+
+    if not (
+        user.role == "admin"
+        or knowledge.user_id == user.id
+        or has_access(user.id, "read", knowledge.access_control)
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_400_BAD_REQUEST,
+            detail=ERROR_MESSAGES.ACCESS_PROHIBITED,
+        )
+
+    page = max(page, 1)
+
+    limit = 30
+    skip = (page - 1) * limit
+
+    filter = {}
+    if query:
+        filter["query"] = query
+    if view_option:
+        filter["view_option"] = view_option
+    if order_by:
+        filter["order_by"] = order_by
+    if direction:
+        filter["direction"] = direction
+
+    return Knowledges.search_files_by_id(
+        id, user.id, filter=filter, skip=skip, limit=limit
+    )
+
+
 ############################
 # AddFileToKnowledge
 ############################
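Per-knowledge-base file listing also moves server-side: the new `GET /{id}/files` route takes the same `query`/`view_option`/`order_by`/`direction`/`page` parameters and delegates to `Knowledges.search_files_by_id`. A sketch of querying it; the id, URL, and token are placeholders:

```python
import requests

resp = requests.get(
    "http://localhost:8080/api/v1/knowledge/kb-123/files",  # placeholder id
    params={
        "query": "handbook",
        "order_by": "updated_at",
        "direction": "desc",
        "page": 1,
    },
    headers={"Authorization": "Bearer YOUR_API_KEY"},  # placeholder token
)
resp.raise_for_status()
files = resp.json()  # KnowledgeFileListResponse: paginated file metadata
```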
@@ -366,7 +412,6 @@ def add_file_to_knowledge_by_id(
     user=Depends(get_verified_user),
 ):
     knowledge = Knowledges.get_knowledge_by_id(id=id)
-
     if not knowledge:
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
@@ -402,6 +447,11 @@ def add_file_to_knowledge_by_id(
             ProcessFileForm(file_id=form_data.file_id, collection_name=id),
             user=user,
         )
+
+        # Add file to knowledge base
+        Knowledges.add_file_to_knowledge_by_id(
+            knowledge_id=id, file_id=form_data.file_id, user_id=user.id
+        )
     except Exception as e:
         log.debug(e)
         raise HTTPException(
@@ -410,32 +460,10 @@ def add_file_to_knowledge_by_id(
         )

     if knowledge:
-        data = knowledge.data or {}
-        file_ids = data.get("file_ids", [])
-
-        if form_data.file_id not in file_ids:
-            file_ids.append(form_data.file_id)
-            data["file_ids"] = file_ids
-
-            knowledge = Knowledges.update_knowledge_data_by_id(id=id, data=data)
-
-            if knowledge:
-                files = Files.get_file_metadatas_by_ids(file_ids)
-
-                return KnowledgeFilesResponse(
-                    **knowledge.model_dump(),
-                    files=files,
-                )
-            else:
-                raise HTTPException(
-                    status_code=status.HTTP_400_BAD_REQUEST,
-                    detail=ERROR_MESSAGES.DEFAULT("knowledge"),
-                )
-        else:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail=ERROR_MESSAGES.DEFAULT("file_id"),
-            )
+        return KnowledgeFilesResponse(
+            **knowledge.model_dump(),
+            files=Knowledges.get_file_metadatas_by_id(knowledge.id),
+        )
     else:
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
@@ -494,14 +522,9 @@ def update_file_from_knowledge_by_id(
         )

     if knowledge:
-        data = knowledge.data or {}
-        file_ids = data.get("file_ids", [])
-
-        files = Files.get_file_metadatas_by_ids(file_ids)
-
         return KnowledgeFilesResponse(
             **knowledge.model_dump(),
-            files=files,
+            files=Knowledges.get_file_metadatas_by_id(knowledge.id),
         )
     else:
         raise HTTPException(
@@ -546,11 +569,19 @@ def remove_file_from_knowledge_by_id(
             detail=ERROR_MESSAGES.NOT_FOUND,
         )

+    Knowledges.remove_file_from_knowledge_by_id(
+        knowledge_id=id, file_id=form_data.file_id
+    )
+
     # Remove content from the vector database
     try:
         VECTOR_DB_CLIENT.delete(
             collection_name=knowledge.id, filter={"file_id": form_data.file_id}
-        )
+        )  # Remove by file_id first
+
+        VECTOR_DB_CLIENT.delete(
+            collection_name=knowledge.id, filter={"hash": file.hash}
+        )  # Remove by hash as well in case of duplicates
     except Exception as e:
         log.debug("This was most likely caused by bypassing embedding processing")
         log.debug(e)
@@ -571,32 +602,10 @@ def remove_file_from_knowledge_by_id(
         Files.delete_file_by_id(form_data.file_id)

     if knowledge:
-        data = knowledge.data or {}
-        file_ids = data.get("file_ids", [])
-
-        if form_data.file_id in file_ids:
-            file_ids.remove(form_data.file_id)
-            data["file_ids"] = file_ids
-
-            knowledge = Knowledges.update_knowledge_data_by_id(id=id, data=data)
-
-            if knowledge:
-                files = Files.get_file_metadatas_by_ids(file_ids)
-
-                return KnowledgeFilesResponse(
-                    **knowledge.model_dump(),
-                    files=files,
-                )
-            else:
-                raise HTTPException(
-                    status_code=status.HTTP_400_BAD_REQUEST,
-                    detail=ERROR_MESSAGES.DEFAULT("knowledge"),
-                )
-        else:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail=ERROR_MESSAGES.DEFAULT("file_id"),
-            )
+        return KnowledgeFilesResponse(
+            **knowledge.model_dump(),
+            files=Knowledges.get_file_metadatas_by_id(knowledge.id),
+        )
     else:
         raise HTTPException(
             status_code=status.HTTP_400_BAD_REQUEST,
@@ -697,8 +706,7 @@ async def reset_knowledge_by_id(id: str, user=Depends(get_verified_user)):
             log.debug(e)
             pass

-    knowledge = Knowledges.update_knowledge_data_by_id(id=id, data={"file_ids": []})
-
+    knowledge = Knowledges.reset_knowledge_by_id(id=id)
     return knowledge
@@ -708,7 +716,7 @@ async def reset_knowledge_by_id(id: str, user=Depends(get_verified_user)):


 @router.post("/{id}/files/batch/add", response_model=Optional[KnowledgeFilesResponse])
-def add_files_to_knowledge_batch(
+async def add_files_to_knowledge_batch(
     request: Request,
     id: str,
     form_data: list[KnowledgeFileIdForm],
@@ -748,7 +756,7 @@ def add_files_to_knowledge_batch(

     # Process files
     try:
-        result = process_files_batch(
+        result = await process_files_batch(
             request=request,
             form_data=BatchProcessFilesForm(files=files, collection_name=id),
             user=user,
@@ -759,25 +767,19 @@ def add_files_to_knowledge_batch(
         )
         raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))

-    # Add successful files to knowledge base
-    data = knowledge.data or {}
-    existing_file_ids = data.get("file_ids", [])
-
     # Only add files that were successfully processed
     successful_file_ids = [r.file_id for r in result.results if r.status == "completed"]
     for file_id in successful_file_ids:
-        if file_id not in existing_file_ids:
-            existing_file_ids.append(file_id)
-
-    data["file_ids"] = existing_file_ids
-    knowledge = Knowledges.update_knowledge_data_by_id(id=id, data=data)
+        Knowledges.add_file_to_knowledge_by_id(
+            knowledge_id=id, file_id=file_id, user_id=user.id
+        )

     # If there were any errors, include them in the response
     if result.errors:
         error_details = [f"{err.file_id}: {err.error}" for err in result.errors]
         return KnowledgeFilesResponse(
             **knowledge.model_dump(),
-            files=Files.get_file_metadatas_by_ids(existing_file_ids),
+            files=Knowledges.get_file_metadatas_by_id(knowledge.id),
             warnings={
                 "message": "Some files failed to process",
                 "errors": error_details,
@@ -786,5 +788,5 @@ def add_files_to_knowledge_batch(

     return KnowledgeFilesResponse(
         **knowledge.model_dump(),
-        files=Files.get_file_metadatas_by_ids(existing_file_ids),
+        files=Knowledges.get_file_metadatas_by_id(knowledge.id),
     )
@@ -7,11 +7,9 @@ from typing import Optional

 from open_webui.models.memories import Memories, MemoryModel
 from open_webui.retrieval.vector.factory import VECTOR_DB_CLIENT
 from open_webui.utils.auth import get_verified_user
-from open_webui.env import SRC_LOG_LEVELS

 log = logging.getLogger(__name__)
-log.setLevel(SRC_LOG_LEVELS["MODELS"])

 router = APIRouter()
@@ -5,6 +5,7 @@ import json
 import asyncio
 import logging

+from open_webui.models.groups import Groups
 from open_webui.models.models import (
     ModelForm,
     ModelModel,
@@ -78,6 +79,10 @@ async def get_models(
         filter["direction"] = direction

     if not user.role == "admin" or not BYPASS_ADMIN_ACCESS_CONTROL:
+        groups = Groups.get_groups_by_member_id(user.id)
+        if groups:
+            filter["group_ids"] = [group.id for group in groups]
+
         filter["user_id"] = user.id

     return Models.search_models(user.id, filter=filter, skip=skip, limit=limit)
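Model listing now gets the same group-aware scoping used elsewhere in this changeset: unless the caller is an admin and `BYPASS_ADMIN_ACCESS_CONTROL` is set, the search filter is narrowed to the user's own models plus models shared with any of their groups. A runnable sketch of the filter this builds (plain objects stand in for the ORM models, and `search_models` is assumed to treat `user_id`/`group_ids` as an access scope):

```python
from types import SimpleNamespace


def build_model_filter(user, groups, bypass: bool) -> dict:
    # Mirrors the router logic: admins with the bypass flag see
    # everything; everyone else is scoped to own + group-shared models.
    filter = {}
    if not user.role == "admin" or not bypass:
        if groups:
            filter["group_ids"] = [g.id for g in groups]
        filter["user_id"] = user.id
    return filter


alice = SimpleNamespace(id="u1", role="user")
groups = [SimpleNamespace(id="g1"), SimpleNamespace(id="g2")]
assert build_model_filter(alice, groups, bypass=True) == {
    "group_ids": ["g1", "g2"],
    "user_id": "u1",
}
admin = SimpleNamespace(id="u0", role="admin")
assert build_model_filter(admin, [], bypass=True) == {}
```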
@@ -286,12 +291,15 @@ async def get_model_by_id(id: str, user=Depends(get_verified_user)):
 @router.get("/model/profile/image")
 async def get_model_profile_image(id: str, user=Depends(get_verified_user)):
     model = Models.get_model_by_id(id)
+    # Cache-control headers to prevent stale cached images
+    cache_headers = {"Cache-Control": "no-cache, must-revalidate"}

     if model:
         if model.meta.profile_image_url:
             if model.meta.profile_image_url.startswith("http"):
                 return Response(
                     status_code=status.HTTP_302_FOUND,
-                    headers={"Location": model.meta.profile_image_url},
+                    headers={"Location": model.meta.profile_image_url, **cache_headers},
                 )
             elif model.meta.profile_image_url.startswith("data:image"):
                 try:
@@ -302,14 +310,17 @@ async def get_model_profile_image(id: str, user=Depends(get_verified_user)):
                     return StreamingResponse(
                         image_buffer,
                         media_type="image/png",
-                        headers={"Content-Disposition": "inline; filename=image.png"},
+                        headers={
+                            "Content-Disposition": "inline; filename=image.png",
+                            **cache_headers,
+                        },
                     )
                 except Exception as e:
                     pass

-        return FileResponse(f"{STATIC_DIR}/favicon.png")
+        return FileResponse(f"{STATIC_DIR}/favicon.png", headers=cache_headers)
     else:
-        return FileResponse(f"{STATIC_DIR}/favicon.png")
+        return FileResponse(f"{STATIC_DIR}/favicon.png", headers=cache_headers)


 ############################
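Every branch of the profile-image route, including the static favicon fallback, now carries `Cache-Control: no-cache, must-revalidate`, so browsers revalidate instead of showing a stale avatar after the image changes. A quick way to confirm the header on a running instance (URL, model id, and token are placeholders):

```python
import requests

resp = requests.get(
    "http://localhost:8080/api/v1/models/model/profile/image",  # assumed prefix
    params={"id": "my-model"},  # placeholder model id
    headers={"Authorization": "Bearer YOUR_API_KEY"},  # placeholder token
)
print(resp.headers.get("Cache-Control"))  # expected: "no-cache, must-revalidate"
```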
@@ -8,20 +8,28 @@ from pydantic import BaseModel

 from open_webui.socket.main import sio


+from open_webui.models.groups import Groups
 from open_webui.models.users import Users, UserResponse
-from open_webui.models.notes import Notes, NoteModel, NoteForm, NoteUserResponse
+from open_webui.models.notes import (
+    NoteListResponse,
+    Notes,
+    NoteModel,
+    NoteForm,
+    NoteUserResponse,
+)

-from open_webui.config import ENABLE_ADMIN_CHAT_ACCESS, ENABLE_ADMIN_EXPORT
+from open_webui.config import (
+    BYPASS_ADMIN_ACCESS_CONTROL,
+    ENABLE_ADMIN_CHAT_ACCESS,
+    ENABLE_ADMIN_EXPORT,
+)
 from open_webui.constants import ERROR_MESSAGES
-from open_webui.env import SRC_LOG_LEVELS

 from open_webui.utils.auth import get_admin_user, get_verified_user
 from open_webui.utils.access_control import has_access, has_permission

 log = logging.getLogger(__name__)
-log.setLevel(SRC_LOG_LEVELS["MODELS"])

 router = APIRouter()
@@ -30,39 +38,17 @@ router = APIRouter()
 ############################


-@router.get("/", response_model=list[NoteUserResponse])
-async def get_notes(request: Request, user=Depends(get_verified_user)):
-
-    if user.role != "admin" and not has_permission(
-        user.id, "features.notes", request.app.state.config.USER_PERMISSIONS
-    ):
-        raise HTTPException(
-            status_code=status.HTTP_401_UNAUTHORIZED,
-            detail=ERROR_MESSAGES.UNAUTHORIZED,
-        )
-
-    notes = [
-        NoteUserResponse(
-            **{
-                **note.model_dump(),
-                "user": UserResponse(**Users.get_user_by_id(note.user_id).model_dump()),
-            }
-        )
-        for note in Notes.get_notes_by_permission(user.id, "write")
-    ]
-
-    return notes
-
-
-class NoteTitleIdResponse(BaseModel):
+class NoteItemResponse(BaseModel):
     id: str
     title: str
+    data: Optional[dict]
     updated_at: int
     created_at: int
+    user: Optional[UserResponse] = None


-@router.get("/list", response_model=list[NoteTitleIdResponse])
-async def get_note_list(
+@router.get("/", response_model=list[NoteItemResponse])
+async def get_notes(
     request: Request, page: Optional[int] = None, user=Depends(get_verified_user)
 ):
     if user.role != "admin" and not has_permission(
@@ -80,15 +66,64 @@ async def get_note_list(
     skip = (page - 1) * limit

     notes = [
-        NoteTitleIdResponse(**note.model_dump())
-        for note in Notes.get_notes_by_permission(
-            user.id, "write", skip=skip, limit=limit
+        NoteUserResponse(
+            **{
+                **note.model_dump(),
+                "user": UserResponse(**Users.get_user_by_id(note.user_id).model_dump()),
+            }
         )
+        for note in Notes.get_notes_by_user_id(user.id, "read", skip=skip, limit=limit)
     ]

     return notes


+@router.get("/search", response_model=NoteListResponse)
+async def search_notes(
+    request: Request,
+    query: Optional[str] = None,
+    view_option: Optional[str] = None,
+    permission: Optional[str] = None,
+    order_by: Optional[str] = None,
+    direction: Optional[str] = None,
+    page: Optional[int] = 1,
+    user=Depends(get_verified_user),
+):
+    if user.role != "admin" and not has_permission(
+        user.id, "features.notes", request.app.state.config.USER_PERMISSIONS
+    ):
+        raise HTTPException(
+            status_code=status.HTTP_401_UNAUTHORIZED,
+            detail=ERROR_MESSAGES.UNAUTHORIZED,
+        )
+
+    limit = None
+    skip = None
+    if page is not None:
+        limit = 60
+        skip = (page - 1) * limit
+
+    filter = {}
+    if query:
+        filter["query"] = query
+    if view_option:
+        filter["view_option"] = view_option
+    if permission:
+        filter["permission"] = permission
+    if order_by:
+        filter["order_by"] = order_by
+    if direction:
+        filter["direction"] = direction
+
+    if not user.role == "admin" or not BYPASS_ADMIN_ACCESS_CONTROL:
+        groups = Groups.get_groups_by_member_id(user.id)
+        if groups:
+            filter["group_ids"] = [group.id for group in groups]
+
+        filter["user_id"] = user.id
+
+    return Notes.search_notes(user.id, filter, skip=skip, limit=limit)


 ############################
 # CreateNewNote
 ############################
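Notes gain the same search surface: `GET /notes/search` accepts optional `query`, `view_option`, `permission`, `order_by`, `direction`, and `page` parameters and applies the group-aware scoping before delegating to `Notes.search_notes`. A sketch of a client call; the `/api/v1/notes` prefix, URL, and token are assumptions:

```python
import requests

resp = requests.get(
    "http://localhost:8080/api/v1/notes/search",  # assumed route prefix
    params={"query": "meeting", "permission": "write", "page": 1},
    headers={"Authorization": "Bearer YOUR_API_KEY"},  # placeholder token
)
resp.raise_for_status()
data = resp.json()  # NoteListResponse: matching items plus a total count
```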
@@ -98,7 +133,6 @@ async def get_note_list(
 async def create_new_note(
     request: Request, form_data: NoteForm, user=Depends(get_verified_user)
 ):
-
     if user.role != "admin" and not has_permission(
         user.id, "features.notes", request.app.state.config.USER_PERMISSIONS
     ):
@@ -122,7 +156,11 @@ async def create_new_note(
 ############################


-@router.get("/{id}", response_model=Optional[NoteModel])
+class NoteResponse(NoteModel):
+    write_access: bool = False
+
+
+@router.get("/{id}", response_model=Optional[NoteResponse])
 async def get_note_by_id(request: Request, id: str, user=Depends(get_verified_user)):
     if user.role != "admin" and not has_permission(
         user.id, "features.notes", request.app.state.config.USER_PERMISSIONS
@@ -146,7 +184,15 @@ async def get_note_by_id(request: Request, id: str, user=Depends(get_verified_user)):
             status_code=status.HTTP_403_FORBIDDEN, detail=ERROR_MESSAGES.DEFAULT()
         )

-    return note
+    write_access = (
+        user.role == "admin"
+        or (user.id == note.user_id)
+        or has_access(
+            user.id, type="write", access_control=note.access_control, strict=False
+        )
+    )
+
+    return NoteResponse(**note.model_dump(), write_access=write_access)


 ############################
@@ -58,7 +58,6 @@ from open_webui.config import (
 )
 from open_webui.env import (
     ENV,
-    SRC_LOG_LEVELS,
     MODELS_CACHE_TTL,
     AIOHTTP_CLIENT_SESSION_SSL,
     AIOHTTP_CLIENT_TIMEOUT,
@@ -68,7 +67,6 @@ from open_webui.env import (
 from open_webui.constants import ERROR_MESSAGES

 log = logging.getLogger(__name__)
-log.setLevel(SRC_LOG_LEVELS["OLLAMA"])


 ##########################################
@@ -879,6 +877,7 @@ async def delete_model(
     url = request.app.state.config.OLLAMA_BASE_URLS[url_idx]
     key = get_api_key(url_idx, url, request.app.state.config.OLLAMA_API_CONFIGS)

+    r = None
     try:
         headers = {
             "Content-Type": "application/json",
@@ -892,7 +891,7 @@ async def delete_model(
             method="DELETE",
             url=f"{url}/api/delete",
             headers=headers,
-            data=form_data.model_dump_json(exclude_none=True).encode(),
+            json=form_data,
         )
         r.raise_for_status()
@@ -949,10 +948,7 @@ async def show_model_info(
     headers = include_user_info_headers(headers, user)

     r = requests.request(
-        method="POST",
-        url=f"{url}/api/show",
-        headers=headers,
-        data=form_data.model_dump_json(exclude_none=True).encode(),
+        method="POST", url=f"{url}/api/show", headers=headers, json=form_data
     )
     r.raise_for_status()
@@ -1282,7 +1278,12 @@ async def generate_chat_completion(

     if model_info:
         if model_info.base_model_id:
-            payload["model"] = model_info.base_model_id
+            base_model_id = (
+                request.base_model_id
+                if hasattr(request, "base_model_id")
+                else model_info.base_model_id
+            )  # Use request's base_model_id if available
+            payload["model"] = base_model_id

         params = model_info.params.model_dump()
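Both the Ollama router above and the OpenAI router below now prefer a `base_model_id` carried on the incoming request object over the one stored in the model record, falling back when the attribute is absent. The fallback pattern in isolation, with plain objects standing in for the request and model info:

```python
from types import SimpleNamespace


def resolve_base_model_id(request, model_info) -> str:
    # Prefer an override attached to the request; fall back to the
    # base model recorded on the model entry.
    return (
        request.base_model_id
        if hasattr(request, "base_model_id")
        else model_info.base_model_id
    )


model_info = SimpleNamespace(base_model_id="llama3:8b")
assert resolve_base_model_id(SimpleNamespace(), model_info) == "llama3:8b"
assert (
    resolve_base_model_id(SimpleNamespace(base_model_id="llama3:70b"), model_info)
    == "llama3:70b"
)
```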
@@ -35,7 +35,6 @@ from open_webui.env import (
 from open_webui.models.users import UserModel

 from open_webui.constants import ERROR_MESSAGES
-from open_webui.env import SRC_LOG_LEVELS

 from open_webui.utils.payload import (
@@ -53,7 +52,6 @@ from open_webui.utils.headers import include_user_info_headers


 log = logging.getLogger(__name__)
-log.setLevel(SRC_LOG_LEVELS["OPENAI"])


 ##########################################
@@ -814,8 +812,13 @@ async def generate_chat_completion(
     # Check model info and override the payload
     if model_info:
         if model_info.base_model_id:
-            payload["model"] = model_info.base_model_id
-            model_id = model_info.base_model_id
+            base_model_id = (
+                request.base_model_id
+                if hasattr(request, "base_model_id")
+                else model_info.base_model_id
+            )  # Use request's base_model_id if available
+            payload["model"] = base_model_id
+            model_id = base_model_id

         params = model_info.params.model_dump()
@@ -891,10 +894,11 @@ async def generate_chat_completion(
         del payload["max_tokens"]

     # Convert the modified body back to JSON
-    if "logit_bias" in payload:
-        payload["logit_bias"] = json.loads(
-            convert_logit_bias_input_to_json(payload["logit_bias"])
-        )
+    if "logit_bias" in payload and payload["logit_bias"]:
+        logit_bias = convert_logit_bias_input_to_json(payload["logit_bias"])
+
+        if logit_bias:
+            payload["logit_bias"] = json.loads(logit_bias)

     headers, cookies = await get_headers_and_cookies(
         request, url, key, api_config, metadata, user=user
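The new guard skips the conversion for empty or falsy `logit_bias` values instead of feeding them straight into `json.loads` and failing. A standalone sketch of the same defensive pattern; `convert_logit_bias_input_to_json` is stubbed here for illustration and does not reflect the real helper's implementation:

```python
import json


def convert_logit_bias_input_to_json(value) -> str:
    # Stand-in for the real converter: accept a dict or a
    # "token:bias,token:bias" string and emit a JSON object string.
    if isinstance(value, dict):
        return json.dumps(value)
    pairs = (item.split(":") for item in str(value).split(",") if ":" in item)
    return json.dumps({tok.strip(): int(bias) for tok, bias in pairs})


payload = {"logit_bias": "50256:-100"}
if "logit_bias" in payload and payload["logit_bias"]:
    logit_bias = convert_logit_bias_input_to_json(payload["logit_bias"])
    if logit_bias:
        payload["logit_bias"] = json.loads(logit_bias)

assert payload["logit_bias"] == {"50256": -100}
```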
@@ -18,7 +18,7 @@ from pydantic import BaseModel
 from starlette.responses import FileResponse
 from typing import Optional

-from open_webui.env import SRC_LOG_LEVELS, AIOHTTP_CLIENT_SESSION_SSL
+from open_webui.env import AIOHTTP_CLIENT_SESSION_SSL
 from open_webui.config import CACHE_DIR
 from open_webui.constants import ERROR_MESSAGES
@@ -28,7 +28,6 @@ from open_webui.routers.openai import get_all_models_responses
 from open_webui.utils.auth import get_admin_user

 log = logging.getLogger(__name__)
-log.setLevel(SRC_LOG_LEVELS["MAIN"])


 ##################################
Some files were not shown because too many files have changed in this diff.