feat: Allow configuring whether large single-line data is processed

Shirasawa 2025-11-06 04:02:49 +00:00
parent 89c0e150c8
commit ce1079d358


@@ -542,17 +542,21 @@ def extract_urls(text: str) -> list[str]:
     return url_pattern.findall(text)
 
-async def handle_large_stream_chunks(stream: aiohttp.StreamReader, max_buffer_size: int = CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE):
+def handle_large_stream_chunks(stream: aiohttp.StreamReader, max_buffer_size: int = CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE):
     """
     Handle stream response chunks, supporting large data chunks that exceed the original 16kb limit.
     When a single line exceeds max_buffer_size, yields an empty JSON object ({}) and skips subsequent
     data until a normally sized line is encountered.
 
     :param stream: The stream reader to handle.
-    :param max_buffer_size: The maximum buffer size in bytes.
+    :param max_buffer_size: The maximum buffer size in bytes; -1 (or any value <= 0) disables large-chunk handling. Defaults to 10MB.
     :return: An async generator that yields the stream data.
     """
+    if max_buffer_size <= 0:
+        return stream
+
+    async def handle_stream_chunks():
         buffer = b""
         skip_mode = False
@@ -599,3 +603,5 @@ async def handle_large_stream_chunks(stream: aiohttp.StreamReader, max_buffer_si
         # Process remaining buffer data
         if buffer and not skip_mode:
             yield buffer
+
+    return handle_stream_chunks()
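
The diff collapses the unchanged body between the two hunks, so for context, here is a minimal sketch of how the reworked function could look end to end. The buffering and skip-mode logic in the middle is not shown in the diff and is reconstructed here as an assumption from the docstring; the 10MB value of CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE is likewise assumed, since the docstring only states the default.

import aiohttp

# Assumed value; the docstring only says the default is 10MB.
CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE = 10 * 1024 * 1024

def handle_large_stream_chunks(stream: aiohttp.StreamReader, max_buffer_size: int = CHAT_STREAM_RESPONSE_CHUNK_MAX_BUFFER_SIZE):
    # Handling disabled: hand back the raw reader, which itself supports
    # `async for` (aiohttp.StreamReader iterates line by line).
    if max_buffer_size <= 0:
        return stream

    async def handle_stream_chunks():
        # The body of this inner generator is an assumption; the commit's
        # diff does not show the lines between the two hunks.
        buffer = b""
        skip_mode = False
        async for chunk in stream.iter_any():
            buffer += chunk
            if skip_mode:
                # Drop data until the oversized line's terminating newline.
                newline = buffer.find(b"\n")
                if newline == -1:
                    buffer = b""
                    continue
                buffer = buffer[newline + 1:]
                skip_mode = False
            # Emit complete lines.
            while b"\n" in buffer:
                line, buffer = buffer.split(b"\n", 1)
                yield line + b"\n"
            # A partial line has outgrown the limit: emit a placeholder
            # empty JSON object and skip until the next newline.
            if len(buffer) > max_buffer_size:
                yield b"{}\n"
                skip_mode = True
                buffer = b""
        # Process remaining buffer data
        if buffer and not skip_mode:
            yield buffer

    return handle_stream_chunks()

One design consequence worth noting: dropping async from the outer def does not change call sites. The old version was an async generator function, so it was also iterated without await, and both possible return values of the new version (the raw StreamReader and the inner generator) support async for. A hypothetical caller:

async def consume(resp: aiohttp.ClientResponse):
    # Pass max_buffer_size=-1 to opt out of large-chunk handling entirely.
    async for line in handle_large_stream_chunks(resp.content):
        print(line)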