mirror of
https://github.com/open-webui/open-webui.git
synced 2025-12-13 04:45:19 +00:00
Merge pull request #2943 from choltha/fix/temperature-params
fix: model settings temperature not passed correctly from model settings to ollama/openai api
This commit is contained in:
commit
8c95a8be3a
2 changed files with 3 additions and 3 deletions
|
|
@@ -764,7 +764,7 @@ async def generate_chat_completion(
             "frequency_penalty", None
         )

-    if model_info.params.get("temperature", None):
+    if model_info.params.get("temperature", None) is not None:
         payload["options"]["temperature"] = model_info.params.get(
             "temperature", None
         )
|
@@ -373,8 +373,8 @@ async def proxy(path: str, request: Request, user=Depends(get_verified_user)):
             model_info.params = model_info.params.model_dump()

             if model_info.params:
-                if model_info.params.get("temperature", None):
-                    payload["temperature"] = int(
+                if model_info.params.get("temperature", None) is not None:
+                    payload["temperature"] = float(
                         model_info.params.get("temperature")
                     )
Loading…
Reference in a new issue