fix(ollama.py): fix api connection error

https://github.com/BerriAI/litellm/issues/1735
Krrish Dholakia 2024-02-03 20:22:25 -08:00
parent dc506e3df1
commit 01cef1fe9e


@@ -179,7 +179,9 @@ def get_ollama_response(
     elif stream == True:
         return ollama_completion_stream(url=url, data=data, logging_obj=logging_obj)
-    response = requests.post(url=f"{url}", json=data, timeout=litellm.request_timeout)
+    response = requests.post(
+        url=f"{url}", json={**data, "stream": stream}, timeout=litellm.request_timeout
+    )
     if response.status_code != 200:
         raise OllamaError(status_code=response.status_code, message=response.text)
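
For context, a minimal sketch of the request shape this change enforces, assuming a local Ollama server at the default http://localhost:11434 endpoint and a hypothetical llama2 model. The likely reason the fix works: without an explicit "stream": False in the payload, /api/generate replies with newline-delimited JSON chunks, which a single .json() parse of the response body cannot handle.

import requests

OLLAMA_URL = "http://localhost:11434/api/generate"  # assumed default Ollama endpoint

payload = {
    "model": "llama2",                 # hypothetical model name
    "prompt": "Why is the sky blue?",
    "stream": False,                   # ask for one complete JSON object instead of a chunked stream
}

response = requests.post(url=OLLAMA_URL, json=payload, timeout=600)
if response.status_code != 200:
    raise RuntimeError(response.text)
# In non-streaming mode the generated text is returned under the "response" key.
print(response.json().get("response"))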