fix get llm provider

This commit is contained in:
Krrish Dholakia 2023-09-12 11:48:42 -07:00
parent ff36b9c6da
commit 05f2f3dde7
3 changed files with 4 additions and 1 deletion

View file

@@ -947,6 +947,9 @@ def get_llm_provider(model: str, custom_llm_provider: Optional[str] = None):
 ## openai - chatcompletion + text completion
 if model in litellm.open_ai_chat_completion_models or model in litellm.open_ai_text_completion_models:
     custom_llm_provider = "openai"
+## anthropic
+elif model in litellm.anthropic_models:
+    custom_llm_provider = "anthropic"
 ## cohere
 elif model in litellm.cohere_models:
     custom_llm_provider = "cohere"

View file

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.603"
+version = "0.1.604"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"