Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 11:43:54 +00:00
async streaming for anthropic
parent 32c3aab34e
commit 2f1b55dd70
1 changed file with 0 additions and 13 deletions
@@ -1197,19 +1197,6 @@ def completion(
                 logging_obj=logging,
                 headers=headers,
             )
-            if (
-                "stream" in optional_params
-                and optional_params["stream"] == True
-                and not isinstance(response, CustomStreamWrapper)
-            ):
-                # don't try to access stream object,
-                response = CustomStreamWrapper(
-                    response,
-                    model,
-                    custom_llm_provider="anthropic",
-                    logging_obj=logging,
-                )
-
             if optional_params.get("stream", False) or acompletion == True:
                 ## LOGGING
                 logging.post_call(
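For readers following the change: the deleted block re-wrapped the Anthropic response in CustomStreamWrapper inside completion(), which becomes redundant once streaming (sync and async) is handled by the shared path kept below. As a rough illustration of what "async streaming for anthropic" looks like from the caller's side, here is a minimal sketch; the model name and the chunk field access are illustrative assumptions, not taken from this commit.

import asyncio
import litellm

async def main():
    # stream=True together with acompletion() exercises the async streaming
    # path this commit touches; the Anthropic model name is an assumption.
    response = await litellm.acompletion(
        model="claude-instant-1",
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
    )
    # The streamed response is an async iterator of OpenAI-style chunks
    # (chunk layout assumed here, not taken from the diff).
    async for chunk in response:
        print(chunk.choices[0].delta.content or "", end="", flush=True)

asyncio.run(main())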