forked from phoenix/litellm-mirror
async anthropic streaming
This commit is contained in:
parent
5c796b4365
commit
2cf41d3d9f
1 changed file with 9 additions and 2 deletions
|
@ -4,7 +4,7 @@ from enum import Enum
|
||||||
import requests
|
import requests
|
||||||
import time
|
import time
|
||||||
from typing import Callable, Optional
|
from typing import Callable, Optional
|
||||||
from litellm.utils import ModelResponse, Usage
|
from litellm.utils import ModelResponse, Usage, CustomStreamWrapper
|
||||||
import litellm
|
import litellm
|
||||||
from .prompt_templates.factory import prompt_factory, custom_prompt
|
from .prompt_templates.factory import prompt_factory, custom_prompt
|
||||||
import httpx
|
import httpx
|
||||||
|
@ -162,8 +162,15 @@ def completion(
|
||||||
raise AnthropicError(
|
raise AnthropicError(
|
||||||
status_code=response.status_code, message=response.text
|
status_code=response.status_code, message=response.text
|
||||||
)
|
)
|
||||||
|
completion_stream = response.iter_lines()
|
||||||
|
stream_response = CustomStreamWrapper(
|
||||||
|
completion_stream=completion_stream,
|
||||||
|
model=model,
|
||||||
|
custom_llm_provider="anthropic",
|
||||||
|
logging_obj=logging_obj,
|
||||||
|
)
|
||||||
|
return stream_response
|
||||||
|
|
||||||
return response.iter_lines()
|
|
||||||
else:
|
else:
|
||||||
response = requests.post(api_base, headers=headers, data=json.dumps(data))
|
response = requests.post(api_base, headers=headers, data=json.dumps(data))
|
||||||
if response.status_code != 200:
|
if response.status_code != 200:
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue