Merge pull request #5439 from BerriAI/litellm_use_correct_ai21_pricing

[Fix] Use correct Vertex AI AI21 Cost tracking
This commit is contained in:
Ishaan Jaff 2024-08-30 10:49:42 -07:00 committed by GitHub
commit a842723bb0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
3 changed files with 11 additions and 1 deletion

View file

@@ -358,6 +358,7 @@ vertex_code_text_models: List = []
 vertex_embedding_models: List = []
 vertex_anthropic_models: List = []
 vertex_llama3_models: List = []
+vertex_ai_ai21_models: List = []
 vertex_mistral_models: List = []
 ai21_models: List = []
 nlp_cloud_models: List = []
@@ -408,6 +409,9 @@ for key, value in model_cost.items():
     elif value.get("litellm_provider") == "vertex_ai-mistral_models":
         key = key.replace("vertex_ai/", "")
         vertex_mistral_models.append(key)
+    elif value.get("litellm_provider") == "vertex_ai-ai21_models":
+        key = key.replace("vertex_ai/", "")
+        vertex_ai_ai21_models.append(key)
     elif value.get("litellm_provider") == "ai21":
         ai21_models.append(key)
     elif value.get("litellm_provider") == "nlp_cloud":

View file

@@ -2610,3 +2610,7 @@ async def test_partner_models_httpx_ai21():
     assert response.usage.total_tokens == 194
     print(f"response: {response}")
+    print("hidden params from response=", response._hidden_params)
+    assert response._hidden_params["response_cost"] > 0

View file

@@ -3267,7 +3267,7 @@ def get_optional_params(
             non_default_params=non_default_params,
             optional_params=optional_params,
         )
-    elif custom_llm_provider == "vertex_ai" and model in litellm.ai21_models:
+    elif custom_llm_provider == "vertex_ai" and model in litellm.vertex_ai_ai21_models:
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
         )
@@ -5182,6 +5182,8 @@ def get_model_info(model: str, custom_llm_provider: Optional[str] = None) -> Mod
         model = "meta/" + model
     elif model + "@latest" in litellm.vertex_mistral_models:
         model = model + "@latest"
+    elif model + "@latest" in litellm.vertex_ai_ai21_models:
+        model = model + "@latest"
     ##########################
     if custom_llm_provider is None:
         # Get custom_llm_provider