2024-12-19 09:00:32 +00:00
|
|
|
# tasks.py
|
|
|
|
|
import asyncio
|
|
|
|
|
from typing import Dict
|
|
|
|
|
from uuid import uuid4
|
2025-06-08 16:58:31 +00:00
|
|
|
import json
|
2025-07-02 23:25:39 +00:00
|
|
|
import logging
|
2025-06-09 13:21:10 +00:00
|
|
|
from redis.asyncio import Redis
|
2025-06-08 17:20:30 +00:00
|
|
|
from fastapi import Request
|
|
|
|
|
from typing import Dict, List, Optional
|
2025-08-19 17:10:15 +00:00
|
|
|
from builtins import ExceptionGroup
|
2024-12-19 09:00:32 +00:00
|
|
|
|
2025-07-22 17:40:29 +00:00
|
|
|
from open_webui.env import SRC_LOG_LEVELS, REDIS_KEY_PREFIX
|
2025-07-02 23:25:39 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
# Module-level logger, filtered to the project's configured "MAIN" log level.
log = logging.getLogger(__name__)
log.setLevel(SRC_LOG_LEVELS["MAIN"])


# A dictionary to keep track of active tasks
tasks: Dict[str, asyncio.Task] = {}
# Local fallback mapping of item id -> list of task ids spawned for that item
# (used when Redis is not configured).
item_tasks: Dict[str, List[str]] = {}


# Redis hash mapping task_id -> item_id (empty string when no item),
# shared across all instances.
REDIS_TASKS_KEY = f"{REDIS_KEY_PREFIX}:tasks"
# Key prefix for per-item sets of task ids: f"{REDIS_ITEM_TASKS_KEY}:{item_id}".
REDIS_ITEM_TASKS_KEY = f"{REDIS_KEY_PREFIX}:tasks:item"
# Pub/sub channel used to broadcast task commands (e.g. "stop") to all instances.
REDIS_PUBSUB_CHANNEL = f"{REDIS_KEY_PREFIX}:tasks:commands"
|
2025-06-08 17:20:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
async def redis_task_command_listener(app):
    """
    Long-running listener for distributed task commands.

    Subscribes to the task pub/sub channel and reacts to command messages
    (currently only {"action": "stop", "task_id": ...}) by cancelling the
    matching task if it is running on this instance. Malformed or empty
    messages are logged and skipped; the loop itself never exits on its own.
    """
    # NOTE(review): assumes app.state.redis is an initialized redis.asyncio
    # client — confirm against application startup code.
    redis: Redis = app.state.redis
    pubsub = redis.pubsub()
    await pubsub.subscribe(REDIS_PUBSUB_CHANNEL)

    async for message in pubsub.listen():
        # pubsub.listen() also yields subscribe/unsubscribe notifications;
        # only actual published messages are of interest here.
        if message["type"] != "message":
            continue
        try:
            # Check if message data is empty or None
            if not message["data"]:
                log.warning("Received empty message data from Redis pub/sub")
                continue

            # Attempt to parse JSON
            try:
                command = json.loads(message["data"])
            except json.JSONDecodeError as json_error:
                log.warning(
                    f"Invalid JSON in Redis message: {message['data'][:100]}... Error: {json_error}"
                )
                continue

            if command.get("action") == "stop":
                task_id = command.get("task_id")
                # Only the instance that owns the task has it in `tasks`.
                local_task = tasks.get(task_id)
                if local_task:
                    try:
                        local_task.cancel()
                        # Wait briefly for cancellation to complete
                        await asyncio.sleep(0.1)
                    except Exception as cancel_error:
                        log.error(f"Error cancelling task {task_id}: {cancel_error}")
        except ExceptionGroup as eg:
            # Handle multiple concurrent exceptions
            log.error(
                f"Multiple errors in task command processing: {len(eg.exceptions)} exceptions"
            )
            for i, exc in enumerate(eg.exceptions):
                log.error(f" Exception {i+1}: {type(exc).__name__}: {exc}")
        except Exception as e:
            # Catch-all so one bad message never kills the listener loop.
            log.exception(f"Error handling distributed task command: {e}")
|
2025-06-08 17:20:30 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
### ------------------------------
|
|
|
|
|
### REDIS-ENABLED HANDLERS
|
|
|
|
|
### ------------------------------
|
|
|
|
|
|
|
|
|
|
|
2025-07-11 14:14:48 +00:00
|
|
|
async def redis_save_task(redis: Redis, task_id: str, item_id: Optional[str]):
    """
    Register a task in Redis so every instance can see it.

    Records task_id -> item_id (empty string when there is no item) in the
    global task hash and, when an item id is given, adds the task id to
    that item's set — both in a single pipelined round trip.
    """
    pipeline = redis.pipeline()
    pipeline.hset(REDIS_TASKS_KEY, task_id, "" if item_id is None else item_id)
    if item_id:
        pipeline.sadd(f"{REDIS_ITEM_TASKS_KEY}:{item_id}", task_id)
    await pipeline.execute()
|
2025-06-08 17:20:30 +00:00
|
|
|
|
|
|
|
|
|
2025-07-11 14:14:48 +00:00
|
|
|
async def redis_cleanup_task(redis: Redis, task_id: str, item_id: Optional[str]):
    """
    Remove a task's bookkeeping from Redis.

    Deletes the task from the global task hash and, when an item id is
    given, removes it from that item's task set; the set itself is deleted
    once it becomes empty.
    """
    # The previous implementation called `await pipe.scard(...).execute()`
    # while still building the pipeline, which flushed the queued HDEL/SREM
    # early, relied on the pipeline's implicit reset, and then executed a
    # possibly-empty pipeline at the end. Queue SCARD last instead and
    # execute exactly once.
    pipe = redis.pipeline()
    pipe.hdel(REDIS_TASKS_KEY, task_id)
    if item_id:
        item_key = f"{REDIS_ITEM_TASKS_KEY}:{item_id}"
        pipe.srem(item_key, task_id)
        pipe.scard(item_key)  # queued last, so results[-1] is the member count
        results = await pipe.execute()
        if results[-1] == 0:
            await redis.delete(item_key)  # Remove if empty set
    else:
        await pipe.execute()
|
2025-06-08 17:20:30 +00:00
|
|
|
|
|
|
|
|
|
2025-06-09 13:21:10 +00:00
|
|
|
async def redis_list_tasks(redis: Redis) -> List[str]:
    """Return the ids of every task currently registered in Redis."""
    task_ids = await redis.hkeys(REDIS_TASKS_KEY)
    return list(task_ids)
|
2025-06-08 17:20:30 +00:00
|
|
|
|
|
|
|
|
|
2025-07-11 14:14:48 +00:00
|
|
|
async def redis_list_item_tasks(redis: Redis, item_id: str) -> List[str]:
    """Return the ids of all tasks associated with *item_id* in Redis."""
    members = await redis.smembers(f"{REDIS_ITEM_TASKS_KEY}:{item_id}")
    return list(members)
|
2025-06-08 17:20:30 +00:00
|
|
|
|
|
|
|
|
|
2025-06-09 13:21:10 +00:00
|
|
|
async def redis_send_command(redis: Redis, command: dict):
    """Broadcast *command* (JSON-encoded) on the task pub/sub channel."""
    payload = json.dumps(command)
    await redis.publish(REDIS_PUBSUB_CHANNEL, payload)
|
2025-06-08 17:20:30 +00:00
|
|
|
|
|
|
|
|
|
2025-07-11 13:53:53 +00:00
|
|
|
async def cleanup_task(redis, task_id: str, id=None):
    """
    Remove a completed or canceled task from all bookkeeping.

    Both the Redis-side and the local cleanup are attempted even when one
    of them fails; collected errors are re-raised afterwards (wrapped in
    an ExceptionGroup when there is more than one).
    """
    errors = []

    # Distributed bookkeeping first, when Redis is configured.
    if redis:
        try:
            await redis_cleanup_task(redis, task_id, id)
        except Exception as e:
            log.error(f"Redis cleanup failed for task {task_id}: {e}")
            errors.append(e)

    # Local, in-process bookkeeping.
    try:
        tasks.pop(task_id, None)
        known = item_tasks.get(id, [])
        if id and task_id in known:
            known.remove(task_id)
            if not known:
                item_tasks.pop(id, None)
    except Exception as e:
        log.error(f"Local cleanup failed for task {task_id}: {e}")
        errors.append(e)

    # Surface every failure to the caller.
    if len(errors) > 1 and ExceptionGroup:
        raise ExceptionGroup(f"Multiple cleanup errors for task {task_id}", errors)
    if errors:
        raise errors[0]
|
2024-12-19 09:00:32 +00:00
|
|
|
|
2025-04-13 03:51:02 +00:00
|
|
|
|
2025-07-11 13:53:53 +00:00
|
|
|
async def create_task(redis, coroutine, id=None):
    """
    Create a new asyncio task and add it to the global task dictionary.

    Args:
        redis: Optional Redis client; when given, the task is also
            registered in Redis so other instances can list/stop it.
        coroutine: The coroutine to run as the task.
        id: Optional item id to associate the task with.

    Returns:
        A ``(task_id, task)`` tuple.
    """
    task_id = str(uuid4())  # Generate a unique ID for the task
    task = asyncio.create_task(coroutine)  # Create the task

    # Add a done callback for cleanup
    task.add_done_callback(
        lambda t: asyncio.create_task(cleanup_task(redis, task_id, id))
    )
    tasks[task_id] = task

    # Associate the task with an item only when an id was actually given.
    # Previously a missing id created a permanent entry under the key None
    # that cleanup_task (guarded by `if id`) never removed — a slow leak.
    if id is not None:
        item_tasks.setdefault(id, []).append(task_id)

    if redis:
        await redis_save_task(redis, task_id, id)

    return task_id, task
|
|
|
|
|
|
|
|
|
|
|
2025-07-11 13:53:53 +00:00
|
|
|
async def list_tasks(redis):
    """
    List all currently active task IDs (cluster-wide when Redis is configured).
    """
    if not redis:
        return list(tasks.keys())
    return await redis_list_tasks(redis)
|
|
|
|
|
|
|
|
|
|
|
2025-07-11 14:14:48 +00:00
|
|
|
async def list_task_ids_by_item_id(redis, id):
    """
    List all task ids associated with a specific item id.
    """
    if not redis:
        return item_tasks.get(id, [])
    return await redis_list_item_tasks(redis, id)
|
2025-04-13 03:51:02 +00:00
|
|
|
|
|
|
|
|
|
2025-07-11 13:53:53 +00:00
|
|
|
async def stop_task(redis, task_id: str):
    """
    Cancel a running task and remove it from the global task list.

    With Redis configured, broadcasts a stop command so whichever instance
    owns the task cancels it locally. Without Redis, cancels the local
    task directly and awaits its cancellation.

    Returns:
        A ``{"status": bool, "message": str}`` dict.

    Raises:
        ValueError: if the task id is unknown (local mode only).
    """
    if redis:
        # PUBSUB: All instances check if they have this task, and stop if so.
        await redis_send_command(
            redis,
            {
                "action": "stop",
                "task_id": task_id,
            },
        )
        # Optionally check if task_id still in Redis a few moments later for feedback?
        return {"status": True, "message": f"Stop signal sent for {task_id}"}

    # Use a default so an unknown id reaches the intended ValueError below
    # instead of escaping as an unhandled KeyError from dict.pop().
    task = tasks.pop(task_id, None)
    if not task:
        raise ValueError(f"Task with ID {task_id} not found.")

    task.cancel()  # Request task cancellation
    try:
        await task  # Wait for the task to handle the cancellation
    except asyncio.CancelledError:
        # Task successfully canceled
        return {"status": True, "message": f"Task {task_id} successfully stopped."}

    return {"status": False, "message": f"Failed to stop task {task_id}."}
|
2025-07-11 14:41:09 +00:00
|
|
|
|
|
|
|
|
|
|
|
|
|
async def stop_item_tasks(redis: Redis, item_id: str):
    """
    Stop all tasks associated with a specific item ID.

    Stops tasks one by one and bails out on the first failure, returning
    that task's result dict unchanged.
    """
    task_ids = await list_task_ids_by_item_id(redis, item_id)
    if not task_ids:
        return {"status": True, "message": f"No tasks found for item {item_id}."}

    for tid in task_ids:
        outcome = await stop_task(redis, tid)
        if not outcome["status"]:
            # Propagate the first failure to the caller.
            return outcome

    return {"status": True, "message": f"All tasks for item {item_id} stopped."}
|