forked from phoenix/litellm-mirror
fix(ollama.py): fix api connection error
https://github.com/BerriAI/litellm/issues/1735
This commit is contained in:
parent
dc506e3df1
commit
01cef1fe9e
1 changed file with 3 additions and 1 deletion
|
@ -179,7 +179,9 @@ def get_ollama_response(
|
|||
elif stream == True:
|
||||
return ollama_completion_stream(url=url, data=data, logging_obj=logging_obj)
|
||||
|
||||
response = requests.post(url=f"{url}", json=data, timeout=litellm.request_timeout)
|
||||
response = requests.post(
|
||||
url=f"{url}", json={**data, "stream": stream}, timeout=litellm.request_timeout
|
||||
)
|
||||
if response.status_code != 200:
|
||||
raise OllamaError(status_code=response.status_code, message=response.text)
|
||||
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue