Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
Merge pull request #5439 from BerriAI/litellm_use_correct_ai21_pricing
[Fix] Use correct Vertex AI AI21 Cost tracking
Commit a842723bb0
3 changed files with 11 additions and 1 deletion
@@ -358,6 +358,7 @@ vertex_code_text_models: List = []
 vertex_embedding_models: List = []
 vertex_anthropic_models: List = []
 vertex_llama3_models: List = []
+vertex_ai_ai21_models: List = []
 vertex_mistral_models: List = []
 ai21_models: List = []
 nlp_cloud_models: List = []
@@ -408,6 +409,9 @@ for key, value in model_cost.items():
     elif value.get("litellm_provider") == "vertex_ai-mistral_models":
         key = key.replace("vertex_ai/", "")
         vertex_mistral_models.append(key)
+    elif value.get("litellm_provider") == "vertex_ai-ai21_models":
+        key = key.replace("vertex_ai/", "")
+        vertex_ai_ai21_models.append(key)
     elif value.get("litellm_provider") == "ai21":
         ai21_models.append(key)
     elif value.get("litellm_provider") == "nlp_cloud":
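For context, here is a minimal, self-contained sketch of what the classification loop above does with the new "vertex_ai-ai21_models" label. The model_cost entries below are placeholders for illustration only; litellm builds the real map from its bundled pricing JSON.

# Placeholder model_cost entries; only the provider label matters here.
model_cost = {
    "vertex_ai/jamba-1.5-large": {"litellm_provider": "vertex_ai-ai21_models"},
    "vertex_ai/mistral-large@2407": {"litellm_provider": "vertex_ai-mistral_models"},
    "j2-ultra": {"litellm_provider": "ai21"},
}

vertex_mistral_models: list = []
vertex_ai_ai21_models: list = []
ai21_models: list = []

for key, value in model_cost.items():
    if value.get("litellm_provider") == "vertex_ai-mistral_models":
        key = key.replace("vertex_ai/", "")
        vertex_mistral_models.append(key)
    elif value.get("litellm_provider") == "vertex_ai-ai21_models":
        # New branch from this PR: strip the "vertex_ai/" prefix and register the model.
        key = key.replace("vertex_ai/", "")
        vertex_ai_ai21_models.append(key)
    elif value.get("litellm_provider") == "ai21":
        ai21_models.append(key)

print(vertex_ai_ai21_models)  # ['jamba-1.5-large']
print(ai21_models)            # ['j2-ultra']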
@@ -2610,3 +2610,7 @@ async def test_partner_models_httpx_ai21():
         assert response.usage.total_tokens == 194
 
+        print(f"response: {response}")
+
+        print("hidden params from response=", response._hidden_params)
 
+        assert response._hidden_params["response_cost"] > 0
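The new assertion exercises cost tracking end to end: with the model now registered under the correct Vertex AI AI21 pricing, response._hidden_params["response_cost"] should come back greater than zero. As a rough, hedged illustration of how per-token prices turn into such a cost (placeholder prices and an illustrative prompt/completion split, not litellm's real numbers):

# Placeholder per-token prices; the real values live in litellm's model cost map.
input_cost_per_token = 2e-07
output_cost_per_token = 4e-07

# Illustrative split; the test above only asserts total_tokens == 194.
prompt_tokens = 150
completion_tokens = 44

response_cost = (
    prompt_tokens * input_cost_per_token
    + completion_tokens * output_cost_per_token
)
assert response_cost > 0  # mirrors the new assertion in the test
print(f"response_cost: {response_cost:.6f}")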
@@ -3267,7 +3267,7 @@ def get_optional_params(
             non_default_params=non_default_params,
             optional_params=optional_params,
         )
-    elif custom_llm_provider == "vertex_ai" and model in litellm.ai21_models:
+    elif custom_llm_provider == "vertex_ai" and model in litellm.vertex_ai_ai21_models:
         supported_params = get_supported_openai_params(
             model=model, custom_llm_provider=custom_llm_provider
         )
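This one-line change is the core of the fix: the old condition tested membership in litellm.ai21_models, which (per the loop earlier in this diff) only holds models whose provider is plain "ai21", so Vertex-hosted AI21 models never matched. A hedged sketch with placeholder list contents:

# Placeholder contents for illustration; the real lists are populated from
# litellm's cost map at import time (see the classification loop above).
ai21_models = ["j2-ultra"]                    # direct AI21 API models
vertex_ai_ai21_models = ["jamba-1.5-large"]   # AI21 models served via Vertex AI

model = "jamba-1.5-large"
custom_llm_provider = "vertex_ai"

old_match = custom_llm_provider == "vertex_ai" and model in ai21_models
new_match = custom_llm_provider == "vertex_ai" and model in vertex_ai_ai21_models

print(old_match, new_match)  # False True -> only the new list catches the model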
@@ -5182,6 +5182,8 @@ def get_model_info(model: str, custom_llm_provider: Optional[str] = None) -> Mod
         model = "meta/" + model
     elif model + "@latest" in litellm.vertex_mistral_models:
         model = model + "@latest"
+    elif model + "@latest" in litellm.vertex_ai_ai21_models:
+        model = model + "@latest"
     ##########################
     if custom_llm_provider is None:
         # Get custom_llm_provider
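The two added lines mirror the existing Mistral handling: if the caller passes the bare model name but the cost map registers only the "@latest" variant, get_model_info falls back to that key. A tiny standalone sketch of the pattern, with a made-up registry entry:

# Hypothetical registry contents; the real list is built from litellm's cost map.
vertex_ai_ai21_models = ["jamba-1.5-mini@latest"]

def normalize_model_name(model: str) -> str:
    # Fall back to the "@latest" variant when only that key is registered,
    # mirroring the branch added in the hunk above.
    if model + "@latest" in vertex_ai_ai21_models:
        return model + "@latest"
    return model

print(normalize_model_name("jamba-1.5-mini"))  # jamba-1.5-mini@latest
print(normalize_model_name("other-model"))     # other-model (unchanged)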