forked from phoenix/litellm-mirror
Merge pull request #3694 from BerriAI/litellm_allow_setting_anthropic_beta
[Feat] Support Anthropic `tools-2024-05-16` - Set Custom Anthropic Headers
This commit is contained in:
commit
97324800ec
4 changed files with 35 additions and 3 deletions
|
@ -223,6 +223,34 @@ assert isinstance(
|
||||||
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Setting `anthropic-beta` Header in Requests
|
||||||
|
|
||||||
|
Pass the `extra_headers` param to litellm. All headers will be forwarded to the Anthropic API.
|
||||||
|
|
||||||
|
```python
|
||||||
|
response = completion(
|
||||||
|
model="anthropic/claude-3-opus-20240229",
|
||||||
|
messages=messages,
|
||||||
|
tools=tools,
|
||||||
|
extra_headers={"anthropic-beta": "tools-2024-05-16"},
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
### Forcing Anthropic Tool Use
|
||||||
|
|
||||||
|
If you want Claude to use a specific tool to answer the user’s question
|
||||||
|
|
||||||
|
You can do this by specifying the tool in the `tool_choice` field like so:
|
||||||
|
```python
|
||||||
|
response = completion(
|
||||||
|
model="anthropic/claude-3-opus-20240229",
|
||||||
|
messages=messages,
|
||||||
|
tools=tools,
|
||||||
|
tool_choice={"type": "tool", "name": "get_weather"},
|
||||||
|
extra_headers={"anthropic-beta": "tools-2024-05-16"},
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
### Parallel Function Calling
|
### Parallel Function Calling
|
||||||
|
|
||||||
|
|
|
@ -93,6 +93,7 @@ class AnthropicConfig:
|
||||||
"max_tokens",
|
"max_tokens",
|
||||||
"tools",
|
"tools",
|
||||||
"tool_choice",
|
"tool_choice",
|
||||||
|
"extra_headers",
|
||||||
]
|
]
|
||||||
|
|
||||||
def map_openai_params(self, non_default_params: dict, optional_params: dict):
|
def map_openai_params(self, non_default_params: dict, optional_params: dict):
|
||||||
|
@ -504,7 +505,9 @@ class AnthropicChatCompletion(BaseLLM):
|
||||||
## Handle Tool Calling
|
## Handle Tool Calling
|
||||||
if "tools" in optional_params:
|
if "tools" in optional_params:
|
||||||
_is_function_call = True
|
_is_function_call = True
|
||||||
headers["anthropic-beta"] = "tools-2024-04-04"
|
if "anthropic-beta" not in headers:
|
||||||
|
# default to v1 of "anthropic-beta"
|
||||||
|
headers["anthropic-beta"] = "tools-2024-04-04"
|
||||||
|
|
||||||
anthropic_tools = []
|
anthropic_tools = []
|
||||||
for tool in optional_params["tools"]:
|
for tool in optional_params["tools"]:
|
||||||
|
|
|
@ -552,7 +552,7 @@ def completion(
|
||||||
model_info = kwargs.get("model_info", None)
|
model_info = kwargs.get("model_info", None)
|
||||||
proxy_server_request = kwargs.get("proxy_server_request", None)
|
proxy_server_request = kwargs.get("proxy_server_request", None)
|
||||||
fallbacks = kwargs.get("fallbacks", None)
|
fallbacks = kwargs.get("fallbacks", None)
|
||||||
headers = kwargs.get("headers", None)
|
headers = kwargs.get("headers", None) or extra_headers
|
||||||
num_retries = kwargs.get("num_retries", None) ## deprecated
|
num_retries = kwargs.get("num_retries", None) ## deprecated
|
||||||
max_retries = kwargs.get("max_retries", None)
|
max_retries = kwargs.get("max_retries", None)
|
||||||
context_window_fallback_dict = kwargs.get("context_window_fallback_dict", None)
|
context_window_fallback_dict = kwargs.get("context_window_fallback_dict", None)
|
||||||
|
|
|
@ -278,7 +278,8 @@ def test_completion_claude_3_function_call():
|
||||||
model="anthropic/claude-3-opus-20240229",
|
model="anthropic/claude-3-opus-20240229",
|
||||||
messages=messages,
|
messages=messages,
|
||||||
tools=tools,
|
tools=tools,
|
||||||
tool_choice="auto",
|
tool_choice={"type": "tool", "name": "get_weather"},
|
||||||
|
extra_headers={"anthropic-beta": "tools-2024-05-16"},
|
||||||
)
|
)
|
||||||
# Add any assertions, here to check response args
|
# Add any assertions, here to check response args
|
||||||
print(response)
|
print(response)
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue