mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 19:24:27 +00:00
fix(ollama.py): support format for ollama
This commit is contained in:
parent
dd87386cad
commit
d1db67890c
2 changed files with 12 additions and 1 deletions
|
@@ -146,7 +146,15 @@ def get_ollama_response(
|
||||||
optional_params[k] = v
|
optional_params[k] = v
|
||||||
|
|
||||||
stream = optional_params.pop("stream", False)
|
stream = optional_params.pop("stream", False)
|
||||||
data = {"model": model, "prompt": prompt, "options": optional_params}
|
format = optional_params.pop("format", None)
|
||||||
|
data = {
|
||||||
|
"model": model,
|
||||||
|
"prompt": prompt,
|
||||||
|
"options": optional_params,
|
||||||
|
"stream": stream,
|
||||||
|
}
|
||||||
|
if format is not None:
|
||||||
|
data["format"] = format
|
||||||
|
|
||||||
## LOGGING
|
## LOGGING
|
||||||
logging_obj.pre_call(
|
logging_obj.pre_call(
|
||||||
|
|
|
@@ -146,12 +146,15 @@ def get_ollama_response(
|
||||||
optional_params[k] = v
|
optional_params[k] = v
|
||||||
|
|
||||||
stream = optional_params.pop("stream", False)
|
stream = optional_params.pop("stream", False)
|
||||||
|
format = optional_params.pop("format", None)
|
||||||
data = {
|
data = {
|
||||||
"model": model,
|
"model": model,
|
||||||
"messages": messages,
|
"messages": messages,
|
||||||
"options": optional_params,
|
"options": optional_params,
|
||||||
"stream": stream,
|
"stream": stream,
|
||||||
}
|
}
|
||||||
|
if format is not None:
|
||||||
|
data["format"] = format
|
||||||
## LOGGING
|
## LOGGING
|
||||||
logging_obj.pre_call(
|
logging_obj.pre_call(
|
||||||
input=None,
|
input=None,
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue