Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
fix(ollama.py): support format for ollama
This commit is contained in:
parent dd87386cad
commit d1db67890c

2 changed files with 12 additions and 1 deletion
@@ -146,12 +146,15 @@ def get_ollama_response(
             optional_params[k] = v
     stream = optional_params.pop("stream", False)
+    format = optional_params.pop("format", None)
     data = {
         "model": model,
         "messages": messages,
         "options": optional_params,
         "stream": stream,
     }
+    if format is not None:
+        data["format"] = format
     ## LOGGING
     logging_obj.pre_call(
         input=None,
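
A minimal usage sketch (not part of the commit) of how a caller might exercise the new format handling, assuming litellm.completion forwards a provider-specific format argument into optional_params as the diff implies; the model name, api_base, and prompt are illustrative.

```python
import litellm

# Sketch only: assumes a locally running Ollama server and that a
# `format` keyword passed to completion() ends up in optional_params,
# where the diff above pops it and sets data["format"].
response = litellm.completion(
    model="ollama/llama2",                 # illustrative Ollama model
    messages=[{"role": "user", "content": "List three colors as JSON."}],
    api_base="http://localhost:11434",     # default Ollama endpoint (assumption)
    format="json",                         # forwarded to Ollama as data["format"]
)
print(response.choices[0].message.content)
```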