Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
raise a better exception if the LLM provider isn't passed in or inferred
parent 4acca3d4d9
commit baa69734b0
8 changed files with 63 additions and 1 deletion
@@ -17,6 +17,7 @@ from litellm.utils import (
     CustomStreamWrapper,
     read_config_args,
     completion_with_fallbacks,
+    get_llm_provider
 )
 from .llms import anthropic
 from .llms import together_ai
@@ -168,6 +169,7 @@ def completion(
         completion_call_id=id
     )
     logging.update_environment_variables(model=model, user=user, optional_params=optional_params, litellm_params=litellm_params)
+    get_llm_provider(model=model, custom_llm_provider=custom_llm_provider)
     if custom_llm_provider == "azure":
         # azure configs
         openai.api_type = get_secret("AZURE_API_TYPE") or "azure"
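For context, below is a minimal sketch of the pattern this commit introduces: validate the provider up front and raise an actionable error when it is neither passed in nor inferable. The PROVIDER_PREFIXES mapping, the function body, and the exact error message are illustrative assumptions; litellm's real get_llm_provider covers many more providers and inference rules.

# Hypothetical prefix-to-provider mapping, for illustration only.
PROVIDER_PREFIXES = {
    "claude": "anthropic",
    "gpt-": "openai",
    "togethercomputer/": "together_ai",
}

def get_llm_provider(model, custom_llm_provider=None):
    # An explicitly passed provider always wins.
    if custom_llm_provider:
        return model, custom_llm_provider
    # Otherwise, try to infer the provider from the model name.
    for prefix, provider in PROVIDER_PREFIXES.items():
        if model.startswith(prefix):
            return model, provider
    # Neither passed in nor inferred: fail fast with an actionable message,
    # rather than letting an opaque failure surface downstream.
    raise ValueError(
        f"LLM provider not provided and could not be inferred for model={model!r}. "
        "Pass custom_llm_provider or use a recognized model name."
    )

With a check like this placed early in completion(), a call with an unrecognized model fails immediately with a message that names the model and the fix, instead of an unrelated provider-routing error later in the call.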