Mirror of https://github.com/open-webui/open-webui.git
feat: Re-use Redis connection pools via local cache to prevent transient exhaustion
Every call to get_redis_connection() spawned a new connection pool, so workers slowly accumulated thousands of open sockets. Even though connections were eventually released, skewed release timing still pushed us past Redis' max-clients limit and the cluster egress IP cap.

A module-level _CONNECTION_CACHE now memoises pools by (redis_url, sentinel_hosts, async_mode, decode_responses), so repeated calls with the same arguments share one pool. Result: a flat connection count and no more IP or file-descriptor exhaustion. The public API is unchanged.

Signed-off-by: Sihyeon Jang <sihyeon.jang@navercorp.com>
parent 78754fbe96
commit f59da361f1
1 changed file with 18 additions and 8 deletions
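
As a rough illustration of the approach described in the commit message, here is a minimal sketch of memoising Redis clients by a key built from the call arguments. The function name get_cached_client and its parameters are illustrative only, not the code changed in this commit; the actual change to get_redis_connection() is in the diff below.

# Minimal, self-contained sketch of pool re-use via memoisation.
# get_cached_client is a hypothetical name, not the function changed below.
import redis

_CONNECTION_CACHE: dict = {}

def get_cached_client(redis_url: str, decode_responses: bool = True):
    # One cache entry per distinct (url, decode_responses) pair, so every
    # caller with the same parameters shares a single client and its pool.
    key = (redis_url, decode_responses)
    if key not in _CONNECTION_CACHE:
        _CONNECTION_CACHE[key] = redis.Redis.from_url(
            redis_url, decode_responses=decode_responses
        )
    return _CONNECTION_CACHE[key]
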
@@ -10,6 +10,9 @@ from open_webui.env import REDIS_SENTINEL_MAX_RETRY_COUNT
 log = logging.getLogger(__name__)
 
 
+_CONNECTION_CACHE = {}
+
+
 class SentinelRedisProxy:
     def __init__(self, sentinel, service, *, async_mode: bool = True, **kw):
         self._sentinel = sentinel
@@ -108,6 +111,14 @@ def parse_redis_service_url(redis_url):
 
 def get_redis_connection(
     redis_url, redis_sentinels, async_mode=False, decode_responses=True
 ):
+
+    cache_key = (redis_url, tuple(redis_sentinels) if redis_sentinels else (), async_mode, decode_responses)
+
+    if cache_key in _CONNECTION_CACHE:
+        return _CONNECTION_CACHE[cache_key]
+
+    connection = None
+
     if async_mode:
         import redis.asyncio as redis
@@ -122,15 +133,13 @@ def get_redis_connection(
                 password=redis_config["password"],
                 decode_responses=decode_responses,
             )
-            return SentinelRedisProxy(
+            connection = SentinelRedisProxy(
                 sentinel,
                 redis_config["service"],
                 async_mode=async_mode,
             )
         elif redis_url:
-            return redis.from_url(redis_url, decode_responses=decode_responses)
-        else:
-            return None
+            connection = redis.from_url(redis_url, decode_responses=decode_responses)
     else:
         import redis
 
@@ -144,15 +153,16 @@ def get_redis_connection(
                 password=redis_config["password"],
                 decode_responses=decode_responses,
             )
-            return SentinelRedisProxy(
+            connection = SentinelRedisProxy(
                 sentinel,
                 redis_config["service"],
                 async_mode=async_mode,
            )
         elif redis_url:
-            return redis.Redis.from_url(redis_url, decode_responses=decode_responses)
-        else:
-            return None
+            connection = redis.Redis.from_url(redis_url, decode_responses=decode_responses)
+
+    _CONNECTION_CACHE[cache_key] = connection
+    return connection
 
 
 def get_sentinels_from_env(sentinel_hosts_env, sentinel_port_env):
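
With the cache in place, repeated calls that pass the same arguments should return the same client object and therefore share one connection pool. A quick hypothetical check, assuming the helper is importable as open_webui.utils.redis (the file path is not shown in this diff):

# Hypothetical sanity check; the module path open_webui.utils.redis is an
# assumption, since the diff above does not show the file name.
from open_webui.utils.redis import get_redis_connection

a = get_redis_connection("redis://localhost:6379/0", None, async_mode=False)
b = get_redis_connection("redis://localhost:6379/0", None, async_mode=False)

# Both calls resolve to the same _CONNECTION_CACHE entry.
assert a is b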