Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00
removed close
Can't just close() the response because you still need to stream the results
parent a3c70a099e
commit 2ab4fc96e4
1 changed file with 1 addition and 1 deletion
@@ -460,7 +460,7 @@ class OpenAIChatCompletion(BaseLLM):
             else:
                 headers = {}
             response = raw_response.parse()
-            raw_response.http_response.close()
+            # raw_response.http_response.close()
             return headers, response
         except Exception as e:
             if raw_response is not None:
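For context, the sketch below is not code from this repository; it is an assumption-based illustration using the openai Python SDK's with_raw_response API (model and message values are placeholders) of why closing the underlying HTTP response breaks streaming: with stream=True, raw_response.parse() returns a lazy stream that still reads chunks from the open connection, so the connection must stay open until iteration finishes.

# Sketch only, assuming the openai Python SDK v1.x; not the repository's code.
from openai import OpenAI

client = OpenAI()

raw_response = client.chat.completions.with_raw_response.create(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "hi"}],
    stream=True,
)

headers = dict(raw_response.headers)
response = raw_response.parse()  # with stream=True this is a lazy stream, not a parsed object

# Closing here, as the removed line did, would shut the connection that the
# stream below still needs to read from:
# raw_response.http_response.close()

for chunk in response:  # each chunk is read from the still-open HTTP response
    print(chunk.choices[0].delta.content or "", end="")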