fix(anthropic.py): fix parallel streaming on anthropic.py

prevent parallel requests from cancelling each other

Fixes https://github.com/BerriAI/litellm/issues/3881
Krrish Dholakia 2024-05-28 16:29:09 -07:00
parent 073bca78d4
commit 324bf027f5
3 changed files with 152 additions and 180 deletions

@@ -43,12 +43,13 @@ class AsyncHTTPHandler:
         self,
         url: str,
         data: Optional[Union[dict, str]] = None,  # type: ignore
+        json: Optional[dict] = None,
         params: Optional[dict] = None,
         headers: Optional[dict] = None,
         stream: bool = False,
     ):
         req = self.client.build_request(
-            "POST", url, data=data, params=params, headers=headers  # type: ignore
+            "POST", url, data=data, json=json, params=params, headers=headers  # type: ignore
         )
         response = await self.client.send(req, stream=stream)
         return response
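
Below is a minimal usage sketch, not part of the diff, showing how a caller might use the new `json` parameter so each parallel streaming request is built and serialized independently. The endpoint, headers, and payloads are illustrative assumptions, not taken from litellm.

# Illustrative sketch only: assumes `handler` is an AsyncHTTPHandler instance as
# defined above; the endpoint, headers, and payload shapes are placeholders.
import asyncio


async def stream_one(handler, payload: dict):
    # Each call passes its own dict via `json=`, so httpx serializes the body
    # per request instead of the caller sharing a pre-encoded `data` string.
    response = await handler.post(
        "https://api.anthropic.com/v1/messages",  # placeholder endpoint
        json=payload,
        headers={"x-api-key": "<key>", "anthropic-version": "2023-06-01"},
        stream=True,
    )
    return response


async def main(handler):
    payloads = [
        {
            "model": "claude-3-opus-20240229",
            "messages": [{"role": "user", "content": f"request {i}"}],
            "stream": True,
        }
        for i in range(3)
    ]
    # Fire the streaming requests concurrently; each builds its own request object.
    return await asyncio.gather(*(stream_one(handler, p) for p in payloads))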