mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-26 03:04:13 +00:00
fix(ollama.py): fix returned error message for streaming error
This commit is contained in:
parent
9527a9770f
commit
e990c70beb
1 changed file with 1 addition and 1 deletion
|
@ -234,7 +234,7 @@ def ollama_completion_stream(url, data, logging_obj):
|
||||||
try:
|
try:
|
||||||
if response.status_code != 200:
|
if response.status_code != 200:
|
||||||
raise OllamaError(
|
raise OllamaError(
|
||||||
status_code=response.status_code, message=response.text
|
status_code=response.status_code, message=str(response)
|
||||||
)
|
)
|
||||||
|
|
||||||
streamwrapper = litellm.CustomStreamWrapper(
|
streamwrapper = litellm.CustomStreamWrapper(
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue