fix(ollama_chat.py): explicitly state if ollama call is streaming or not

This commit is contained in:
Krrish Dholakia 2024-02-06 07:43:47 -08:00
parent 9b2a2e6c8b
commit 50fb54883b

View file

@ -146,7 +146,12 @@ def get_ollama_response(
             optional_params[k] = v
     stream = optional_params.pop("stream", False)
-    data = {"model": model, "messages": messages, "options": optional_params}
+    data = {
+        "model": model,
+        "messages": messages,
+        "options": optional_params,
+        "stream": stream,
+    }
     ## LOGGING
     logging_obj.pre_call(
         input=None,