Merge pull request #4635 from BerriAI/litellm_anthropic_adapter

Anthropic `/v1/messages` endpoint support
This commit is contained in:
Krish Dholakia 2024-07-10 22:41:53 -07:00 committed by GitHub
commit f4d140efec
15 changed files with 1163 additions and 161 deletions

View file

@ -5,9 +5,12 @@ import traceback
from typing import Literal, Optional, Union
import dotenv
from pydantic import BaseModel
from litellm.caching import DualCache
from litellm.proxy._types import UserAPIKeyAuth
from litellm.types.llms.openai import ChatCompletionRequest
from litellm.types.utils import ModelResponse
class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callback#callback-class
@ -55,6 +58,30 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac
def pre_call_check(self, deployment: dict) -> Optional[dict]:
    """Hook invoked before a deployment is called.

    Subclasses may override to inspect/modify the deployment dict;
    the base implementation is a no-op and returns ``None``.
    """
    return None
#### ADAPTERS #### Allow calling 100+ LLMs in custom format - https://github.com/BerriAI/litellm/pulls
def translate_completion_input_params(
    self, kwargs
) -> Optional[ChatCompletionRequest]:
    """Map a provider-native request into litellm.completion() kwargs.

    Adapter hook: subclasses override to translate the input params from
    the provider's native format to the litellm.completion() format.
    Base implementation does nothing and returns ``None``.
    """
    return None
def translate_completion_output_params(
    self, response: ModelResponse
) -> Optional[BaseModel]:
    """Map an OpenAI-format response into the adapter's custom format.

    Adapter hook: subclasses override to translate the output params from
    the OpenAI format to the custom format. Base implementation does
    nothing and returns ``None``.
    """
    return None
def translate_completion_output_params_streaming(self) -> Optional[BaseModel]:
    """Map an OpenAI-format streaming chunk into the adapter's custom format.

    Adapter hook: subclasses override to translate each streaming chunk
    from the OpenAI format to the custom format. Base implementation does
    nothing and returns ``None``.
    """
    return None
#### CALL HOOKS - proxy only ####
"""
Control / modify incoming and outgoing data before calling the model