fix(converse_transformation.py): support all bedrock - openai params for arn models
Fixes https://github.com/BerriAI/litellm/issues/10207
parent 1014529ed6
commit da72ac4477
2 changed files with 35 additions and 0 deletions
converse_transformation.py

@@ -107,6 +107,15 @@ class AmazonConverseConfig(BaseConfig):
             "response_format",
         ]

+        if (
+            "arn" in model
+        ):  # we can't infer the model from the arn, so just add all params
+            supported_params.append("tools")
+            supported_params.append("tool_choice")
+            supported_params.append("thinking")
+            supported_params.append("reasoning_effort")
+            return supported_params
+
         ## Filter out 'cross-region' from model name
         base_model = BedrockModelInfo.get_base_model(model)
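
For context, a minimal self-contained sketch of the behavior this hunk adds. It is not the actual litellm implementation: the method name, the shortened params list, and the example output are assumptions based on the hunk context (`supported_params`, the `"arn" in model` check, and the appended values).

    # Simplified sketch, not the real litellm code: when the model string is an
    # application-inference-profile ARN, the base model cannot be inferred, so the
    # tool/reasoning-related params are returned as supported without filtering.
    def get_supported_openai_params(model: str) -> list:
        # illustrative subset; the real list in converse_transformation.py is longer
        supported_params = ["max_tokens", "temperature", "response_format"]
        if "arn" in model:  # we can't infer the model from the arn
            supported_params += ["tools", "tool_choice", "thinking", "reasoning_effort"]
            return supported_params
        # otherwise the list would be filtered against the inferred base model
        return supported_params

    arn = "arn:aws:bedrock:eu-central-1:000000000000:application-inference-profile/a0a0a0a0a0a0"
    print(get_supported_openai_params(arn))
    # ['max_tokens', 'temperature', 'response_format', 'tools', 'tool_choice',
    #  'thinking', 'reasoning_effort']
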
@@ -2972,6 +2972,30 @@ def test_bedrock_application_inference_profile():
     client = HTTPHandler()
     client2 = HTTPHandler()

+    tools = [{
+        "type": "function",
+        "function": {
+            "name": "get_current_weather",
+            "description": "Get the current weather in a given location",
+            "parameters": {
+                "type": "object",
+                "properties": {
+                    "location": {
+                        "type": "string",
+                        "description": "The city and state, e.g. San Francisco, CA",
+                    },
+                    "unit": {
+                        "type": "string",
+                        "enum": ["celsius", "fahrenheit"],
+                    },
+                },
+                "required": ["location"],
+            }
+        }
+    }
+    ]
+
     with patch.object(client, "post") as mock_post, patch.object(
         client2, "post"
     ) as mock_post2:

@@ -2981,6 +3005,7 @@ def test_bedrock_application_inference_profile():
             messages=[{"role": "user", "content": "Hello, how are you?"}],
             model_id="arn:aws:bedrock:eu-central-1:000000000000:application-inference-profile/a0a0a0a0a0a0",
             client=client,
+            tools=tools
         )
     except Exception as e:
         print(e)

@@ -2990,6 +3015,7 @@ def test_bedrock_application_inference_profile():
             model="bedrock/converse/arn:aws:bedrock:eu-central-1:000000000000:application-inference-profile/a0a0a0a0a0a0",
             messages=[{"role": "user", "content": "Hello, how are you?"}],
             client=client2,
+            tools=tools
         )
     except Exception as e:
         print(e)
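
From the caller's side, the updated test roughly corresponds to a request like the sketch below: passing OpenAI-style tools to a Bedrock application inference profile ARN. This is a hedged illustration built from the test diff above; it assumes valid AWS credentials are configured and that a real application-inference-profile ARN replaces the placeholder taken from the test.

    import litellm

    tools = [{
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {
                    "location": {"type": "string", "description": "The city and state, e.g. San Francisco, CA"},
                    "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
                },
                "required": ["location"],
            },
        },
    }]

    # Placeholder ARN copied from the test; substitute your own
    # application-inference-profile ARN.
    response = litellm.completion(
        model="bedrock/converse/arn:aws:bedrock:eu-central-1:000000000000:application-inference-profile/a0a0a0a0a0a0",
        messages=[{"role": "user", "content": "Hello, how are you?"}],
        tools=tools,
    )
    print(response.choices[0].message)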