feat(anthropic_adapter.py): support streaming requests for /v1/messages endpoint

Fixes https://github.com/BerriAI/litellm/issues/5011
This commit is contained in:
Krrish Dholakia 2024-08-03 20:16:19 -07:00
parent 39a98a2882
commit ac6c39c283
9 changed files with 425 additions and 35 deletions

View file

@@ -10,7 +10,7 @@ from pydantic import BaseModel
from litellm.caching import DualCache
from litellm.proxy._types import UserAPIKeyAuth
from litellm.types.llms.openai import ChatCompletionRequest
from litellm.types.utils import ModelResponse
from litellm.types.utils import AdapterCompletionStreamWrapper, ModelResponse
class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callback#callback-class
@@ -76,7 +76,9 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac
"""
pass
def translate_completion_output_params_streaming(self) -> Optional[BaseModel]:
def translate_completion_output_params_streaming(
self, completion_stream: Any
) -> Optional[AdapterCompletionStreamWrapper]:
"""
Translates the streaming chunk, from the OpenAI format to the custom format.
"""