feat(anthropic_adapter.py): add support for translating Anthropic `/v1/messages` params to the OpenAI chat-completions format

This commit is contained in:
Krrish Dholakia 2024-07-10 00:32:28 -07:00
parent 43d86528c1
commit 5d6e172d5c
10 changed files with 542 additions and 4213 deletions

View file

@ -881,3 +881,8 @@ from .assistants.main import *
from .batches.main import *
from .scheduler import *
from .cost_calculator import response_cost_calculator, cost_per_token
### ADAPTERS ###
from .types.adapter import AdapterItem

# Global adapter registry read by adapter_completion(); each entry pairs an
# "id" string with a CustomLogger-derived translator object.
adapters: List[AdapterItem] = []

View file

@ -0,0 +1,43 @@
# What is this?
## Translates OpenAI call to Anthropic `/v1/messages` format
import json
import os
import traceback
import uuid
from typing import Literal, Optional
import dotenv
import httpx
import litellm
from litellm import ChatCompletionRequest, verbose_logger
from litellm.integrations.custom_logger import CustomLogger
from litellm.types.llms.anthropic import AnthropicMessagesRequest
class AnthropicAdapter(CustomLogger):
    """Adapter that accepts Anthropic `/v1/messages` params and routes them
    through litellm's OpenAI-format completion pipeline."""

    def __init__(self) -> None:
        super().__init__()

    def translate_completion_input_params(
        self, kwargs
    ) -> Optional[ChatCompletionRequest]:
        """
        - translate params, where needed
        - pass rest, as is
        """
        anthropic_request = AnthropicMessagesRequest(**kwargs)  # type: ignore
        return litellm.AnthropicConfig().translate_anthropic_to_openai(
            anthropic_message_request=anthropic_request
        )

    def translate_completion_output_params(self, response: litellm.ModelResponse):
        # No custom output translation yet — defer to the base-class default.
        return super().translate_completion_output_params(response)

    def translate_completion_output_params_streaming(self):
        # No custom streaming translation yet — defer to the base-class default.
        return super().translate_completion_output_params_streaming()


anthropic_adapter = AnthropicAdapter()

View file

@ -8,6 +8,8 @@ import dotenv
from litellm.caching import DualCache
from litellm.proxy._types import UserAPIKeyAuth
from litellm.types.llms.openai import ChatCompletionRequest
from litellm.types.utils import ModelResponse
class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callback#callback-class
@ -55,6 +57,28 @@ class CustomLogger: # https://docs.litellm.ai/docs/observability/custom_callbac
def pre_call_check(self, deployment: dict) -> Optional[dict]:
    # No-op by default; subclasses may override to vet or adjust the
    # deployment dict (presumably invoked before dispatch — name-based inference).
    pass
#### ADAPTERS #### Allow calling 100+ LLMs in custom format - https://github.com/BerriAI/litellm/pulls
def translate_completion_input_params(
    self, kwargs
) -> Optional[ChatCompletionRequest]:
    """
    Translates the input params, from the provider's native format to the litellm.completion() format.

    Default implementation is a no-op returning None; adapter subclasses
    override this to return a ChatCompletionRequest.
    """
    pass
def translate_completion_output_params(self, response: ModelResponse):
    """
    Translates the output params, from the OpenAI format to the custom format.

    Default implementation is a no-op returning None; adapter subclasses override it.
    """
    pass
def translate_completion_output_params_streaming(self):
    """
    Translates the streaming chunk, from the OpenAI format to the custom format.

    Default implementation is a no-op returning None; adapter subclasses override it.
    """
    pass
#### CALL HOOKS - proxy only ####
"""
Control the modify incoming / outgoung data before calling the model

View file

@ -20,17 +20,36 @@ from litellm.llms.custom_httpx.http_handler import (
_get_httpx_client,
)
from litellm.types.llms.anthropic import (
AnthopicMessagesAssistantMessageParam,
AnthropicMessagesRequest,
AnthropicMessagesTool,
AnthropicMessagesToolChoice,
AnthropicMessagesUserMessageParam,
ContentBlockDelta,
ContentBlockStart,
MessageBlockDelta,
MessageStartBlock,
)
from litellm.types.llms.openai import (
AllMessageValues,
ChatCompletionAssistantMessage,
ChatCompletionAssistantToolCall,
ChatCompletionImageObject,
ChatCompletionImageUrlObject,
ChatCompletionRequest,
ChatCompletionResponseMessage,
ChatCompletionSystemMessage,
ChatCompletionTextObject,
ChatCompletionToolCallChunk,
ChatCompletionToolCallFunctionChunk,
ChatCompletionToolChoiceFunctionParam,
ChatCompletionToolChoiceObjectParam,
ChatCompletionToolChoiceValues,
ChatCompletionToolMessage,
ChatCompletionToolParam,
ChatCompletionToolParamFunctionChunk,
ChatCompletionUsageBlock,
ChatCompletionUserMessage,
)
from litellm.types.utils import GenericStreamingChunk
from litellm.utils import CustomStreamWrapper, ModelResponse, Usage
@ -168,6 +187,210 @@ class AnthropicConfig:
optional_params["top_p"] = value
return optional_params
def translatable_anthropic_params(self) -> List:
    """
    Names of the Anthropic request params that receive an explicit OpenAI
    translation; every other param is passed through unchanged.
    """
    translated_keys = ("messages", "metadata", "system", "tool_choice", "tools")
    return list(translated_keys)
def translate_anthropic_messages_to_openai(
    self,
    messages: List[
        Union[
            AnthropicMessagesUserMessageParam,
            AnthopicMessagesAssistantMessageParam,
        ]
    ],
) -> List:
    """
    Translate Anthropic `/v1/messages` messages into OpenAI chat messages.

    - user text/image blocks -> one ChatCompletionUserMessage
    - tool_result blocks     -> ChatCompletionToolMessage entries
    - assistant text/tool_use blocks -> ChatCompletionAssistantMessage
    """
    new_messages: List[AllMessageValues] = []
    for m in messages:
        user_message: Optional[ChatCompletionUserMessage] = None
        tool_message_list: List[ChatCompletionToolMessage] = []
        ## USER MESSAGE ##
        if m["role"] == "user":
            ## translate user message
            if isinstance(m["content"], str):
                user_message = ChatCompletionUserMessage(
                    role="user", content=m["content"]
                )
            elif isinstance(m["content"], list):
                new_user_content_list: List[
                    Union[ChatCompletionTextObject, ChatCompletionImageObject]
                ] = []
                for content in m["content"]:
                    if content["type"] == "text":
                        text_obj = ChatCompletionTextObject(
                            type="text", text=content["text"]
                        )
                        new_user_content_list.append(text_obj)
                    elif content["type"] == "image":
                        # BUGFIX: the data-URL must be built from the source
                        # block's media_type + base64 payload; the previous code
                        # embedded the literal block type ("image") and the raw
                        # source dict.
                        image_url = ChatCompletionImageUrlObject(
                            url=f"data:{content['source']['media_type']};base64,{content['source']['data']}"
                        )
                        image_obj = ChatCompletionImageObject(
                            type="image_url", image_url=image_url
                        )
                        new_user_content_list.append(image_obj)
                    elif content["type"] == "tool_result":
                        if "content" not in content:
                            # tool result without a payload -> empty tool message
                            tool_result = ChatCompletionToolMessage(
                                role="tool",
                                tool_call_id=content["tool_use_id"],
                                content="",
                            )
                            tool_message_list.append(tool_result)
                        elif isinstance(content["content"], str):
                            tool_result = ChatCompletionToolMessage(
                                role="tool",
                                tool_call_id=content["tool_use_id"],
                                content=content["content"],
                            )
                            tool_message_list.append(tool_result)
                        elif isinstance(content["content"], list):
                            for c in content["content"]:
                                if c["type"] == "text":
                                    tool_result = ChatCompletionToolMessage(
                                        role="tool",
                                        tool_call_id=content["tool_use_id"],
                                        content=c["text"],
                                    )
                                    tool_message_list.append(tool_result)
                                elif c["type"] == "image":
                                    # BUGFIX: same data-URL construction as above.
                                    image_str = f"data:{c['source']['media_type']};base64,{c['source']['data']}"
                                    tool_result = ChatCompletionToolMessage(
                                        role="tool",
                                        tool_call_id=content["tool_use_id"],
                                        content=image_str,
                                    )
                                    tool_message_list.append(tool_result)
                # BUGFIX: previously the collected text/image blocks were
                # discarded; emit them as the user message.
                if len(new_user_content_list) > 0:
                    user_message = ChatCompletionUserMessage(
                        role="user", content=new_user_content_list
                    )
        if user_message is not None:
            new_messages.append(user_message)
        if len(tool_message_list) > 0:
            new_messages.extend(tool_message_list)
        ## ASSISTANT MESSAGE ##
        assistant_message_str: Optional[str] = None
        tool_calls: List[ChatCompletionAssistantToolCall] = []
        if m["role"] == "assistant":
            if isinstance(m["content"], str):
                assistant_message_str = m["content"]
            elif isinstance(m["content"], list):
                for content in m["content"]:
                    if content["type"] == "text":
                        # concatenate consecutive text blocks into one string
                        if assistant_message_str is None:
                            assistant_message_str = content["text"]
                        else:
                            assistant_message_str += content["text"]
                    elif content["type"] == "tool_use":
                        function_chunk = ChatCompletionToolCallFunctionChunk(
                            name=content["name"],
                            arguments=json.dumps(content["input"]),
                        )
                        tool_calls.append(
                            ChatCompletionAssistantToolCall(
                                id=content["id"],
                                type="function",
                                function=function_chunk,
                            )
                        )
            assistant_message = ChatCompletionAssistantMessage(
                role="assistant", content=assistant_message_str, tool_calls=tool_calls
            )
            new_messages.append(assistant_message)
    return new_messages
def translate_anthropic_tool_choice_to_openai(
    self, tool_choice: AnthropicMessagesToolChoice
) -> ChatCompletionToolChoiceValues:
    """Map an Anthropic `tool_choice` to its OpenAI equivalent
    ("any" -> "required", "auto" -> "auto", "tool" -> function object)."""
    choice_type = tool_choice["type"]
    if choice_type == "any":
        return "required"
    if choice_type == "auto":
        return "auto"
    if choice_type == "tool":
        named_function = ChatCompletionToolChoiceFunctionParam(
            name=tool_choice.get("name", "")
        )
        return ChatCompletionToolChoiceObjectParam(
            type="function", function=named_function
        )
    raise ValueError(
        "Incompatible tool choice param submitted - {}".format(tool_choice)
    )
def translate_anthropic_tools_to_openai(
    self, tools: List[AnthropicMessagesTool]
) -> List[ChatCompletionToolParam]:
    """Convert Anthropic tool definitions into OpenAI `tools` entries,
    preserving the optional description when present."""
    converted: List[ChatCompletionToolParam] = []
    for anthropic_tool in tools:
        fn_chunk = ChatCompletionToolParamFunctionChunk(
            name=anthropic_tool["name"],
            parameters=anthropic_tool["input_schema"],
        )
        if "description" in anthropic_tool:
            fn_chunk["description"] = anthropic_tool["description"]
        converted.append(
            ChatCompletionToolParam(type="function", function=fn_chunk)
        )
    return converted
def translate_anthropic_to_openai(
    self, anthropic_message_request: AnthropicMessagesRequest
) -> ChatCompletionRequest:
    """
    Used by the beta Anthropic Adapter, for translating anthropic `/v1/messages`
    requests to the openai format.
    """
    ## CONVERT ANTHROPIC MESSAGES TO OPENAI
    openai_messages: List[AllMessageValues] = (
        self.translate_anthropic_messages_to_openai(
            messages=anthropic_message_request["messages"]
        )
    )
    ## ADD SYSTEM MESSAGE TO MESSAGES
    # A top-level Anthropic `system` prompt becomes the first openai message.
    if "system" in anthropic_message_request:
        system_message = ChatCompletionSystemMessage(
            role="system", content=anthropic_message_request["system"]
        )
        openai_messages.insert(0, system_message)

    new_kwargs: ChatCompletionRequest = {
        "model": anthropic_message_request["model"],
        "messages": openai_messages,
    }
    ## CONVERT METADATA (user_id)
    if "metadata" in anthropic_message_request:
        if "user_id" in anthropic_message_request["metadata"]:
            new_kwargs["user"] = anthropic_message_request["metadata"]["user_id"]
    ## CONVERT TOOL CHOICE
    if "tool_choice" in anthropic_message_request:
        new_kwargs["tool_choice"] = self.translate_anthropic_tool_choice_to_openai(
            tool_choice=anthropic_message_request["tool_choice"]
        )
    ## CONVERT TOOLS
    if "tools" in anthropic_message_request:
        new_kwargs["tools"] = self.translate_anthropic_tools_to_openai(
            tools=anthropic_message_request["tools"]
        )
    # Every param we don't explicitly translate is passed through unchanged.
    already_translated = set(self.translatable_anthropic_params())
    for key, value in anthropic_message_request.items():
        if key not in already_translated:
            new_kwargs[key] = value  # type: ignore
    return new_kwargs
# makes headers for API call
def validate_environment(api_key, user_headers):

View file

@ -48,6 +48,7 @@ from litellm import ( # type: ignore
get_litellm_params,
get_optional_params,
)
from litellm.integrations.custom_logger import CustomLogger
from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
from litellm.utils import (
CustomStreamWrapper,
@ -3943,6 +3944,33 @@ def text_completion(
return text_completion_response
###### Adapter Completion ################
def adapter_completion(*, adapter_id: str, **kwargs) -> Any:
    """
    Run a completion whose input/output formats are handled by a registered adapter.

    Looks up `adapter_id` in litellm.adapters, translates the incoming kwargs to
    the litellm.completion() format, calls completion, and translates the
    response back to the adapter's native format.

    Raises:
        ValueError: if no adapter matches `adapter_id`, or if the adapter
            produced no translated request params.
    """
    translation_obj: Optional[CustomLogger] = None
    for item in litellm.adapters:
        if item["id"] == adapter_id:
            translation_obj = item["adapter"]
            break  # ids are expected to be unique; stop at the first match
    if translation_obj is None:
        raise ValueError(
            "No matching adapter given. Received 'adapter_id'={}, litellm.adapters={}".format(
                adapter_id, litellm.adapters
            )
        )
    new_kwargs = translation_obj.translate_completion_input_params(kwargs=kwargs)
    # The base CustomLogger hook returns None; fail loudly instead of letting
    # completion(**None) raise an opaque TypeError.
    if new_kwargs is None:
        raise ValueError(
            "Adapter '{}' returned no translated completion params.".format(adapter_id)
        )
    response: ModelResponse = completion(**new_kwargs)  # type: ignore
    translated_response = translation_obj.translate_completion_output_params(
        response=response
    )
    return translated_response
##### Moderation #######################

View file

@ -2,17 +2,6 @@ model_list:
- model_name: "*"
litellm_params:
model: "openai/*"
- model_name: gemini-1.5-flash
litellm_params:
model: gemini/gemini-1.5-flash
- model_name: whisper
litellm_params:
model: azure/azure-whisper
api_version: 2024-02-15-preview
api_base: os.environ/AZURE_EUROPE_API_BASE
api_key: os.environ/AZURE_EUROPE_API_KEY
model_info:
mode: audio_transcription

File diff suppressed because it is too large Load diff

10
litellm/types/adapter.py Normal file
View file

@ -0,0 +1,10 @@
from typing import List
from typing_extensions import Dict, Required, TypedDict, override
from litellm.integrations.custom_logger import CustomLogger
class AdapterItem(TypedDict):
    """Registry entry pairing an adapter id with its translator object."""

    # unique identifier passed as adapter_completion(adapter_id=...)
    id: str
    # CustomLogger subclass implementing the translate_completion_* hooks
    adapter: CustomLogger

View file

@ -9,25 +9,27 @@ class AnthropicMessagesToolChoice(TypedDict, total=False):
name: str
class AnthopicMessagesAssistantMessageTextContentParam(TypedDict, total=False):
    # NOTE(review): "Anthopic" spelling is preserved — renaming would break importers.
    type: Required[Literal["text"]]


class AnthropicMessagesTool(TypedDict, total=False):
    """Anthropic tool definition: name plus a JSON-schema dict of its inputs."""

    name: Required[str]
    description: str
    input_schema: Required[dict]


class AnthropicMessagesTextParam(TypedDict):
    # {"type": "text", "text": ...} content block
    type: Literal["text"]
    text: str


class AnthopicMessagesAssistantMessageToolCallParam(TypedDict, total=False):
    type: Required[Literal["tool_use"]]


class AnthropicMessagesToolUseParam(TypedDict):
    # Assistant-emitted tool invocation: call id + tool name + parsed input args.
    type: Literal["tool_use"]
    id: str
    name: str
    input: dict


# Values allowed inside an assistant message's content list.
AnthropicMessagesAssistantMessageValues = Union[
    AnthopicMessagesAssistantMessageTextContentParam,
    AnthopicMessagesAssistantMessageToolCallParam,
    AnthropicMessagesTextParam,
    AnthropicMessagesToolUseParam,
]
@ -46,6 +48,72 @@ class AnthopicMessagesAssistantMessageParam(TypedDict, total=False):
"""
class AnthropicImageParamSource(TypedDict):
    # base64-encoded image payload; media_type holds the MIME type
    type: Literal["base64"]
    media_type: str
    data: str


class AnthropicMessagesImageParam(TypedDict):
    type: Literal["image"]
    source: AnthropicImageParamSource


class AnthropicMessagesToolResultContent(TypedDict):
    type: Literal["text"]
    text: str


class AnthropicMessagesToolResultParam(TypedDict, total=False):
    """Result block a user message sends back for a prior tool_use call."""

    type: Required[Literal["tool_result"]]
    tool_use_id: Required[str]
    is_error: bool
    # either a plain string or a list of text/image blocks
    content: Union[
        str,
        Iterable[
            Union[AnthropicMessagesToolResultContent, AnthropicMessagesImageParam]
        ],
    ]


# Values allowed inside a user message's content list.
AnthropicMessagesUserMessageValues = Union[
    AnthropicMessagesTextParam,
    AnthropicMessagesImageParam,
    AnthropicMessagesToolResultParam,
]


class AnthropicMessagesUserMessageParam(TypedDict, total=False):
    role: Required[Literal["user"]]
    content: Required[Union[str, Iterable[AnthropicMessagesUserMessageValues]]]


class AnthropicMetadata(TypedDict, total=False):
    # opaque end-user identifier; translated to OpenAI's `user` param
    user_id: str


class AnthropicMessagesRequest(TypedDict, total=False):
    """Request body for Anthropic's `/v1/messages` endpoint; model, messages
    and max_tokens are required, everything else optional."""

    model: Required[str]
    messages: Required[
        List[
            Union[
                AnthropicMessagesUserMessageParam,
                AnthopicMessagesAssistantMessageParam,
            ]
        ]
    ]
    max_tokens: Required[int]
    metadata: AnthropicMetadata
    stop_sequences: List[str]
    stream: bool
    system: str
    temperature: float
    tool_choice: AnthropicMessagesToolChoice
    tools: List[AnthropicMessagesTool]
    top_k: int
    top_p: float
class ContentTextBlockDelta(TypedDict):
"""
'delta': {'type': 'text_delta', 'text': 'Hello'}

View file

@ -305,7 +305,13 @@ class ChatCompletionToolCallFunctionChunk(TypedDict, total=False):
arguments: str
class ChatCompletionToolCallChunk(TypedDict):
class ChatCompletionAssistantToolCall(TypedDict):
    # tool call as it appears inside an assistant *request* message
    id: Optional[str]
    type: Literal["function"]
    function: ChatCompletionToolCallFunctionChunk


class ChatCompletionToolCallChunk(TypedDict):  # result of /chat/completions call
    id: Optional[str]
    type: Literal["function"]
    function: ChatCompletionToolCallFunctionChunk
@ -319,6 +325,107 @@ class ChatCompletionDeltaToolCallChunk(TypedDict, total=False):
index: int
class ChatCompletionTextObject(TypedDict):
    # {"type": "text", "text": ...} content block
    type: Literal["text"]
    text: str


class ChatCompletionImageUrlObject(TypedDict, total=False):
    # url may be an http(s) URL or a base64 data-URL; only url is required
    url: Required[str]
    detail: str


class ChatCompletionImageObject(TypedDict):
    type: Literal["image_url"]
    image_url: ChatCompletionImageUrlObject


class ChatCompletionUserMessage(TypedDict):
    # content: a plain string or a list of text/image blocks
    role: Literal["user"]
    content: Union[
        str, Iterable[Union[ChatCompletionTextObject, ChatCompletionImageObject]]
    ]


class ChatCompletionAssistantMessage(TypedDict, total=False):
    role: Required[Literal["assistant"]]
    # content may be None when the assistant only emitted tool calls
    content: Optional[str]
    name: str
    tool_calls: List[ChatCompletionAssistantToolCall]


class ChatCompletionToolMessage(TypedDict):
    # carries a tool's result back, keyed to the originating call id
    role: Literal["tool"]
    content: str
    tool_call_id: str


class ChatCompletionSystemMessage(TypedDict, total=False):
    role: Required[Literal["system"]]
    content: Required[str]
    name: str


# Any message shape accepted by the chat-completions `messages` list.
AllMessageValues = Union[
    ChatCompletionUserMessage,
    ChatCompletionAssistantMessage,
    ChatCompletionToolMessage,
    ChatCompletionSystemMessage,
]


class ChatCompletionToolChoiceFunctionParam(TypedDict):
    name: str


class ChatCompletionToolChoiceObjectParam(TypedDict):
    type: Literal["function"]
    function: ChatCompletionToolChoiceFunctionParam


ChatCompletionToolChoiceStringValues = Literal["none", "auto", "required"]

# tool_choice: one of the string values above, or an explicit function object
ChatCompletionToolChoiceValues = Union[
    ChatCompletionToolChoiceStringValues, ChatCompletionToolChoiceObjectParam
]


class ChatCompletionToolParamFunctionChunk(TypedDict, total=False):
    name: Required[str]
    description: str
    # JSON-schema dict describing the function's arguments
    parameters: dict


class ChatCompletionToolParam(TypedDict):
    type: Literal["function"]
    function: ChatCompletionToolParamFunctionChunk
class ChatCompletionRequest(TypedDict, total=False):
    """OpenAI `/chat/completions` request body; only model and messages are required."""

    model: Required[str]
    messages: Required[List[AllMessageValues]]
    frequency_penalty: float
    logit_bias: dict
    logprobs: bool
    top_logprobs: int
    max_tokens: int
    n: int
    presence_penalty: float
    response_format: dict
    seed: int
    service_tier: str
    stop: Union[str, List[str]]
    stream_options: dict
    temperature: float
    top_p: float
    tools: List[ChatCompletionToolParam]
    tool_choice: ChatCompletionToolChoiceValues
    parallel_tool_calls: bool
    # older function-calling fields kept alongside `tools`
    function_call: Union[str, dict]
    functions: List
    user: str
class ChatCompletionDeltaChunk(TypedDict, total=False):
content: Optional[str]
tool_calls: List[ChatCompletionDeltaToolCallChunk]