forked from phoenix/litellm-mirror
fix(anthropic.py): fix parallel streaming on anthropic.py
prevent parallel requests from cancelling each other. Fixes https://github.com/BerriAI/litellm/issues/3881
This commit is contained in:
parent
073bca78d4
commit
324bf027f5
3 changed files with 152 additions and 180 deletions
|
@@ -43,12 +43,13 @@ class AsyncHTTPHandler:
|
|||
self,
|
||||
url: str,
|
||||
data: Optional[Union[dict, str]] = None, # type: ignore
|
||||
json: Optional[dict] = None,
|
||||
params: Optional[dict] = None,
|
||||
headers: Optional[dict] = None,
|
||||
stream: bool = False,
|
||||
):
|
||||
req = self.client.build_request(
|
||||
"POST", url, data=data, params=params, headers=headers # type: ignore
|
||||
"POST", url, data=data, json=json, params=params, headers=headers # type: ignore
|
||||
)
|
||||
response = await self.client.send(req, stream=stream)
|
||||
return response
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue