(Feat) add bedrock/deepseek custom import models (#8132)
All checks were successful
Read Version from pyproject.toml / read-version (push) Successful in 16s

* add support for using llama spec with bedrock

* fix get_bedrock_invoke_provider

* add support for using bedrock provider in mappings

* working request

* test_bedrock_custom_deepseek

* test_bedrock_custom_deepseek

* fix _get_model_id_for_llama_like_model

* test_bedrock_custom_deepseek

* doc DeepSeek-R1-Distill-Llama-70B

* test_bedrock_custom_deepseek
This commit is contained in:
Ishaan Jaff 2025-01-31 18:40:44 -08:00 committed by GitHub
parent 29a8a613a7
commit 9ff27809b2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 212 additions and 17 deletions

View file

@@ -6045,20 +6045,23 @@ class ProviderConfigManager:
return litellm.PetalsConfig()
elif litellm.LlmProviders.BEDROCK == provider:
base_model = litellm.AmazonConverseConfig()._get_base_model(model)
bedrock_provider = litellm.BedrockLLM.get_bedrock_invoke_provider(model)
if (
base_model in litellm.bedrock_converse_models
or "converse_like" in model
):
return litellm.AmazonConverseConfig()
elif "amazon" in model: # amazon titan llms
elif bedrock_provider == "amazon": # amazon titan llms
return litellm.AmazonTitanConfig()
elif "meta" in model: # amazon / meta llms
elif (
bedrock_provider == "meta" or bedrock_provider == "llama"
): # amazon / meta llms
return litellm.AmazonLlamaConfig()
elif "ai21" in model: # ai21 llms
elif bedrock_provider == "ai21": # ai21 llms
return litellm.AmazonAI21Config()
elif "cohere" in model: # cohere models on bedrock
elif bedrock_provider == "cohere": # cohere models on bedrock
return litellm.AmazonCohereConfig()
elif "mistral" in model: # mistral models on bedrock
elif bedrock_provider == "mistral": # mistral models on bedrock
return litellm.AmazonMistralConfig()
return litellm.OpenAIGPTConfig()