Mirror of https://github.com/open-webui/open-webui.git (synced 2025-12-12 04:15:25 +00:00)
payload and response modified for compatibility
This commit is contained in:
parent e0769c6a1f
commit 8f6c3f46d6

2 changed files with 82 additions and 0 deletions
@@ -329,3 +329,33 @@ def convert_payload_openai_to_ollama(openai_payload: dict) -> dict:
        ollama_payload["format"] = format

    return ollama_payload


def convert_embedding_payload_openai_to_ollama(openai_payload: dict) -> dict:
    """
    Convert an embeddings request payload from OpenAI format to Ollama format.

    Args:
        openai_payload (dict): The original payload designed for OpenAI API usage.
            Example: {"model": "...", "input": [str, ...] or str}

    Returns:
        dict: A payload compatible with the Ollama API embeddings endpoint.
            Example: {"model": "...", "input": [str, ...]}
    """
    ollama_payload = {
        "model": openai_payload.get("model")
    }

    input_value = openai_payload.get("input")

    # Ollama expects 'input' as a list. If it's a string, wrap it in a list.
    if isinstance(input_value, list):
        ollama_payload["input"] = input_value
    else:
        ollama_payload["input"] = [input_value]

    # Optionally forward 'options', 'truncate', 'keep_alive' if present in the OpenAI request
    for optional_key in ("options", "truncate", "keep_alive"):
        if optional_key in openai_payload:
            ollama_payload[optional_key] = openai_payload[optional_key]

    return ollama_payload
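For context, a minimal usage sketch of the new payload converter (the model name and input string below are illustrative assumptions, not values from this commit):

    # Illustrative only: how convert_embedding_payload_openai_to_ollama reshapes a request.
    openai_request = {
        "model": "nomic-embed-text",      # hypothetical model name
        "input": "why is the sky blue?",  # bare string rather than a list
        "keep_alive": "5m",               # optional key, forwarded as-is
    }

    ollama_request = convert_embedding_payload_openai_to_ollama(openai_request)
    # Expected: the string input is wrapped in a list and 'keep_alive' is carried over:
    # {"model": "nomic-embed-text", "input": ["why is the sky blue?"], "keep_alive": "5m"}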
@@ -125,3 +125,55 @@ async def convert_streaming_response_ollama_to_openai(ollama_streaming_response)
            yield line

    yield "data: [DONE]\n\n"


def convert_response_ollama_to_openai(response):
    """
    Convert the response from the Ollama embeddings endpoint to the OpenAI-compatible format.

    Args:
        response (dict): The response from the Ollama API,
            e.g. {"embedding": [...], "model": "..."}
            or {"embeddings": [{"embedding": [...], "index": 0}, ...], "model": "..."}

    Returns:
        dict: Response adapted to OpenAI's embeddings API format.
            e.g. {
                "object": "list",
                "data": [
                    {"object": "embedding", "embedding": [...], "index": 0},
                    ...
                ],
                "model": "...",
            }
    """
    # Ollama batch-style output
    if isinstance(response, dict) and "embeddings" in response:
        openai_data = []
        for i, emb in enumerate(response["embeddings"]):
            openai_data.append({
                "object": "embedding",
                "embedding": emb.get("embedding"),
                "index": emb.get("index", i),
            })
        return {
            "object": "list",
            "data": openai_data,
            "model": response.get("model"),
        }
    # Ollama single output
    elif isinstance(response, dict) and "embedding" in response:
        return {
            "object": "list",
            "data": [{
                "object": "embedding",
                "embedding": response["embedding"],
                "index": 0,
            }],
            "model": response.get("model"),
        }
    # Already OpenAI-compatible?
    elif isinstance(response, dict) and "data" in response and isinstance(response["data"], list):
        return response

    # Fallback: return as is if unrecognized
    return response
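Similarly, a minimal sketch of the response converter applied to a batch-style Ollama result (the model name and embedding values are illustrative assumptions, not from this commit):

    # Illustrative only: batch-style Ollama output reshaped into OpenAI's list format.
    ollama_response = {
        "model": "nomic-embed-text",  # hypothetical model name
        "embeddings": [
            {"embedding": [0.1, 0.2, 0.3], "index": 0},
            {"embedding": [0.4, 0.5, 0.6], "index": 1},
        ],
    }

    openai_response = convert_response_ollama_to_openai(ollama_response)
    # Expected:
    # {
    #     "object": "list",
    #     "data": [
    #         {"object": "embedding", "embedding": [0.1, 0.2, 0.3], "index": 0},
    #         {"object": "embedding", "embedding": [0.4, 0.5, 0.6], "index": 1},
    #     ],
    #     "model": "nomic-embed-text",
    # }

A single-output response ({"embedding": [...]}) takes the second branch and is wrapped as a one-item list, while anything already in OpenAI's {"object": "list", "data": [...]} shape passes through unchanged.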