diff --git a/litellm/main.py b/litellm/main.py
index a8f7fbd5e0..d41f0e91fa 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -94,6 +94,9 @@ def completion(
     model_response = ModelResponse()
     if azure: # this flag is deprecated, remove once notebooks are also updated.
         custom_llm_provider = "azure"
+    elif model.split("/", 1)[0] in litellm.provider_list: # allow custom provider to be passed in via the model name "azure/chatgpt-test"
+        custom_llm_provider = model.split("/", 1)[0]
+        model = model.split("/", 1)[1]
     args = locals()
     # check if user passed in any of the OpenAI optional params
     optional_params = get_optional_params(
diff --git a/litellm/tests/test_completion.py b/litellm/tests/test_completion.py
index e168c23244..fc99544593 100644
--- a/litellm/tests/test_completion.py
+++ b/litellm/tests/test_completion.py
@@ -25,6 +25,18 @@ def logger_fn(user_model_dict):
     print(f"user_model_dict: {user_model_dict}")
 
 
+def test_completion_custom_provider_model_name():
+    try:
+        response = completion(
+            model="together_ai/togethercomputer/llama-2-70b-chat", messages=messages, logger_fn=logger_fn
+        )
+        # Add any assertions here to check the response
+        print(response)
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+test_completion_custom_provider_model_name()
+
 def test_completion_claude():
     try:
         response = completion(
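
The substantive change in litellm/main.py is that completion() now infers the provider from a "provider/model" prefix: if the segment before the first "/" appears in litellm.provider_list, it becomes custom_llm_provider and the remainder is used as the model name. The following is a minimal standalone sketch of that parsing step; provider_list here is a small assumed subset for illustration, not the library's actual litellm.provider_list.

# Minimal sketch of the provider-prefix parsing added to completion().
# NOTE: provider_list is an assumed stand-in for litellm.provider_list,
# not the library's actual contents.
provider_list = ["azure", "together_ai", "openai"]

def split_provider(model):
    """Return (custom_llm_provider, model) the way the new elif branch does."""
    prefix = model.split("/", 1)[0]
    if prefix in provider_list:
        # maxsplit=1 keeps the rest of the path intact, so nested model
        # names like "togethercomputer/llama-2-70b-chat" survive the split.
        return prefix, model.split("/", 1)[1]
    return None, model

print(split_provider("azure/chatgpt-test"))                             # ('azure', 'chatgpt-test')
print(split_provider("together_ai/togethercomputer/llama-2-70b-chat"))  # ('together_ai', 'togethercomputer/llama-2-70b-chat')
print(split_provider("gpt-3.5-turbo"))                                  # (None, 'gpt-3.5-turbo')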
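
The new test in test_completion.py exercises this path end to end with a Together AI model. Below is a hedged usage sketch along the same lines, outside the test harness: it assumes the relevant provider API key is already set in the environment (for example TOGETHERAI_API_KEY; the exact variable name depends on the litellm version), and messages is defined inline rather than taken from the test module.

from litellm import completion

# Assumes the provider API key is already exported in the environment
# (e.g. TOGETHERAI_API_KEY for Together AI; exact name may vary by version).
messages = [{"role": "user", "content": "Hey, how's it going?"}]

# The "together_ai/" prefix selects the provider; the remainder is passed
# through as the model name.
response = completion(
    model="together_ai/togethercomputer/llama-2-70b-chat",
    messages=messages,
)
print(response)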