Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 11:43:54 +00:00)
fix(ollama_chat.py): explicitly state if ollama call is streaming or not
commit 50fb54883b
parent 9b2a2e6c8b
1 changed file with 6 additions and 1 deletion
ollama_chat.py
@@ -146,7 +146,12 @@ def get_ollama_response(
             optional_params[k] = v
 
     stream = optional_params.pop("stream", False)
-    data = {"model": model, "messages": messages, "options": optional_params}
+    data = {
+        "model": model,
+        "messages": messages,
+        "options": optional_params,
+        "stream": stream,
+    }
     ## LOGGING
     logging_obj.pre_call(
         input=None,
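For context: Ollama's chat endpoint streams its response by default when the "stream" key is omitted, so always including the flag in the payload makes non-streaming calls behave predictably. Below is a minimal, hedged sketch of the request shape the patched code produces. It is not litellm's actual implementation; the base URL, the `ollama_chat` helper name, and the use of the `requests` library are assumptions for illustration only.

```python
# Sketch only: shows the payload shape built by the patched code and how it
# would be sent to Ollama's /api/chat endpoint. Helper name, base URL, and
# the `requests` dependency are illustrative assumptions, not litellm code.
import requests


def ollama_chat(model, messages, options=None, stream=False,
                base_url="http://localhost:11434"):
    # Build the payload the same way the patched code does: the "stream" key
    # is always present, so the server never falls back to its default
    # (streaming) behavior.
    data = {
        "model": model,
        "messages": messages,
        "options": options or {},
        "stream": stream,
    }
    resp = requests.post(f"{base_url}/api/chat", json=data, stream=stream)
    resp.raise_for_status()
    if stream:
        # Streaming responses arrive as one JSON object per line.
        return (line for line in resp.iter_lines() if line)
    return resp.json()
```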