mirror of https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
fix(anthropic.py): fix parallel streaming on anthropic.py
prevent parallel requests from cancelling each other

Fixes https://github.com/BerriAI/litellm/issues/3881
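The root cause: the async HTTP handler was stored on `self`, so concurrent completion calls on the same instance overwrote (and could tear down) each other's client. A minimal sketch of the failure mode with toy classes (hypothetical names, not litellm's actual code):

import asyncio


class SharedHandlerClient:
    async def fetch(self, request_id: int) -> str:
        # BUG: instance attribute shared across all in-flight calls.
        self.handler = f"handler-for-{request_id}"
        await asyncio.sleep(0.01)  # yield; another call may overwrite self.handler
        return self.handler


class LocalHandlerClient:
    async def fetch(self, request_id: int) -> str:
        # FIX: a local variable is private to this coroutine's stack frame.
        handler = f"handler-for-{request_id}"
        await asyncio.sleep(0.01)
        return handler


async def main() -> None:
    shared, local = SharedHandlerClient(), LocalHandlerClient()
    print(await asyncio.gather(*(shared.fetch(i) for i in range(3))))
    # typically ['handler-for-2', 'handler-for-2', 'handler-for-2'] -- collided
    print(await asyncio.gather(*(local.fetch(i) for i in range(3))))
    # ['handler-for-0', 'handler-for-1', 'handler-for-2'] -- isolated


asyncio.run(main())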
parent 073bca78d4
commit 324bf027f5

3 changed files with 152 additions and 180 deletions
@@ -379,13 +379,12 @@ class AnthropicChatCompletion(BaseLLM):
         logger_fn=None,
         headers={},
     ):
-        self.async_handler = AsyncHTTPHandler(
-            timeout=httpx.Timeout(timeout=600.0, connect=5.0)
-        )
+        async_handler = AsyncHTTPHandler(
+            timeout=httpx.Timeout(timeout=600.0, connect=20.0)
+        )
         data["stream"] = True
-        response = await self.async_handler.post(
-            api_base, headers=headers, data=json.dumps(data), stream=True
-        )
+        response = await async_handler.post(api_base, headers=headers, json=data)

         if response.status_code != 200:
             raise AnthropicError(
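Beyond localizing the handler, this hunk bumps the connect timeout from 5s to 20s and swaps the pre-serialized body (data=json.dumps(data)) for httpx's json= parameter, which encodes the payload and sets the Content-Type header itself. A small sketch of the two call styles against plain httpx (the /v1/messages URL is a placeholder; content= is used as the idiomatic stand-in for passing a pre-serialized string):

import asyncio
import json

import httpx


async def demo() -> None:
    payload = {"model": "claude-3-opus-20240229", "stream": True}
    async with httpx.AsyncClient() as client:
        # Old style from the diff: caller serializes and must set the header.
        old = client.build_request(
            "POST",
            "https://example.invalid/v1/messages",
            headers={"content-type": "application/json"},
            content=json.dumps(payload),
        )
        # New style: httpx serializes `payload` and adds the header itself.
        new = client.build_request(
            "POST", "https://example.invalid/v1/messages", json=payload
        )
        print(old.headers["content-type"], new.headers["content-type"])


asyncio.run(demo())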
@@ -421,12 +420,10 @@ class AnthropicChatCompletion(BaseLLM):
         logger_fn=None,
         headers={},
     ) -> Union[ModelResponse, CustomStreamWrapper]:
-        self.async_handler = AsyncHTTPHandler(
+        async_handler = AsyncHTTPHandler(
             timeout=httpx.Timeout(timeout=600.0, connect=5.0)
         )
-        response = await self.async_handler.post(
-            api_base, headers=headers, data=json.dumps(data)
-        )
+        response = await async_handler.post(api_base, headers=headers, json=data)
         if stream and _is_function_call:
             return self.process_streaming_response(
                 model=model,
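With both call sites patched, every request constructs its own handler. A concurrency sketch of the resulting pattern, substituting a bare httpx.AsyncClient for litellm's AsyncHTTPHandler wrapper and a placeholder endpoint (both assumptions, for self-containment):

import asyncio

import httpx


async def post_completion(api_base: str, headers: dict, data: dict) -> httpx.Response:
    # One client per request, mirroring the local `async_handler` in the fix:
    # no shared mutable state, so parallel calls cannot clobber each other.
    async with httpx.AsyncClient(
        timeout=httpx.Timeout(timeout=600.0, connect=20.0)
    ) as client:
        return await client.post(api_base, headers=headers, json=data)


async def main() -> None:
    payloads = [{"prompt": f"request {i}", "stream": True} for i in range(4)]
    # gather() runs all four requests concurrently; with a per-call client,
    # each response (or error) belongs unambiguously to its own request.
    results = await asyncio.gather(
        *(post_completion("https://example.invalid/v1/messages", {}, p) for p in payloads),
        return_exceptions=True,
    )
    for r in results:
        print(type(r).__name__)


asyncio.run(main())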