Merge pull request #2473 from BerriAI/litellm_fix_compatible_provider_model_name

fix(openai.py): return model name with custom llm provider for openai-compatible endpoints (e.g. mistral, together ai, etc.)
This commit is contained in:
Krish Dholakia 2024-03-12 12:58:29 -07:00 committed by GitHub
commit 0d18f3c0ca
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 5 additions and 1 deletion

View file

@ -239,6 +239,7 @@ class OpenAIChatCompletion(BaseLLM):
)
if custom_llm_provider != "openai":
model_response.model = f"{custom_llm_provider}/{model}"
# process all OpenAI compatible provider logic here
if custom_llm_provider == "mistral":
# check if message content passed in as list, and not string
@ -254,6 +255,7 @@ class OpenAIChatCompletion(BaseLLM):
messages=messages,
custom_llm_provider=custom_llm_provider,
)
for _ in range(
2
): # if call fails due to alternating messages, retry with reformatted message