fix(utils.py): support fireworks ai finetuned models

Fixes https://github.com/BerriAI/litellm/issues/4923
Krrish Dholakia 2024-07-27 15:38:27 -07:00
parent 05ba34b9b7
commit d1989b6063
2 changed files with 10 additions and 1 deletion


@@ -25,6 +25,15 @@ def test_get_llm_provider():
 # test_get_llm_provider()
+
+
+def test_get_llm_provider_fireworks():  # tests finetuned fireworks models - https://github.com/BerriAI/litellm/issues/4923
+    model, custom_llm_provider, _, _ = litellm.get_llm_provider(
+        model="fireworks_ai/accounts/my-test-1234"
+    )
+    assert custom_llm_provider == "fireworks_ai"
+    assert model == "accounts/my-test-1234"
+


 def test_get_llm_provider_catch_all():
     _, response, _, _ = litellm.get_llm_provider(model="*")
     assert response == "openai"
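
The new test pins the routing behavior; end to end, the request it guards corresponds roughly to the following (a hedged sketch, not part of this commit - the model string is reused from the test above, and a Fireworks AI API key would still need to be configured):

    import litellm

    # Usage sketch: route a finetuned Fireworks AI model through litellm.
    # The "fireworks_ai/" prefix selects the provider; the rest is passed on as the model.
    response = litellm.completion(
        model="fireworks_ai/accounts/my-test-1234",
        messages=[{"role": "user", "content": "Hello!"}],
    )
    print(response.choices[0].message.content)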


@@ -4463,7 +4463,7 @@ def get_llm_provider(
            dynamic_api_key = api_key or get_secret("DEEPSEEK_API_KEY")
        elif custom_llm_provider == "fireworks_ai":
            # fireworks is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.fireworks.ai/inference/v1
-            if not model.startswith("accounts/fireworks/models"):
+            if not model.startswith("accounts/"):
                model = f"accounts/fireworks/models/{model}"
            api_base = api_base or "https://api.fireworks.ai/inference/v1"
            dynamic_api_key = api_key or (
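
In effect, the loosened guard lets any model that already carries a full accounts/ path (as finetuned models do) pass through untouched, while bare model names still get the public accounts/fireworks/models/ prefix. A minimal standalone sketch of that normalization (the function name and the base-model example are illustrative, not litellm's API):

    def normalize_fireworks_model(model: str) -> str:
        # Finetuned models already carry a full "accounts/..." path: leave them as-is.
        if not model.startswith("accounts/"):
            # Bare model names are assumed to live under the public fireworks account.
            model = f"accounts/fireworks/models/{model}"
        return model

    # Base model gets the standard prefix; a finetuned path passes through unchanged.
    assert normalize_fireworks_model("llama-v3-8b-instruct") == "accounts/fireworks/models/llama-v3-8b-instruct"
    assert normalize_fireworks_model("accounts/my-test-1234") == "accounts/my-test-1234"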