diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index 5e5c135a9f..59e843454f 100644
Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ
diff --git a/litellm/main.py b/litellm/main.py
index a11c5826ca..2c23f02406 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -239,6 +239,7 @@ def completion(
                 model
             ]  # update the model to the actual value if an alias has been passed in
         model_response = ModelResponse()
+
         if deployment_id != None:  # azure llms
             model=deployment_id
             custom_llm_provider="azure"
@@ -706,11 +707,7 @@ def completion(
                 original_response=response,
                 additional_args={"headers": litellm.headers},
             )
-        elif (
-            (
-                model in litellm.huggingface_models and
-                custom_llm_provider!="custom"  # if users use a hf model, with a custom/provider. See implementation of custom_llm_provider == custom
-            ) or
+        elif (
             custom_llm_provider == "huggingface"
         ):
             custom_llm_provider = "huggingface"
diff --git a/pyproject.toml b/pyproject.toml
index ad35986de9..bc40410027 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.794"
+version = "0.1.795"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
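
Review note: the second hunk in litellm/main.py tightens Hugging Face routing — completion() now enters the Hugging Face branch only when custom_llm_provider == "huggingface" is passed explicitly, rather than also matching any model listed in litellm.huggingface_models. A minimal usage sketch of the new behavior follows; the model id and prompt are placeholder assumptions, not taken from this diff:

```python
import litellm

# After this change, having a model listed in litellm.huggingface_models is no
# longer enough to route to the Hugging Face handler; the provider must be
# passed explicitly.
response = litellm.completion(
    model="bigcode/starcoder",  # hypothetical example model id
    messages=[{"role": "user", "content": "Write a haiku about diffs."}],
    custom_llm_provider="huggingface",  # now required to hit the HF code path
)
print(response)
```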