diff --git a/litellm/__pycache__/__init__.cpython-311.pyc b/litellm/__pycache__/__init__.cpython-311.pyc
index 4dd38f2b1..50d941ff7 100644
Binary files a/litellm/__pycache__/__init__.cpython-311.pyc and b/litellm/__pycache__/__init__.cpython-311.pyc differ
diff --git a/litellm/__pycache__/main.cpython-311.pyc b/litellm/__pycache__/main.cpython-311.pyc
index d99268a6c..7e7b9313d 100644
Binary files a/litellm/__pycache__/main.cpython-311.pyc and b/litellm/__pycache__/main.cpython-311.pyc differ
diff --git a/litellm/__pycache__/utils.cpython-311.pyc b/litellm/__pycache__/utils.cpython-311.pyc
index 286b1dabf..6f76a3a82 100644
Binary files a/litellm/__pycache__/utils.cpython-311.pyc and b/litellm/__pycache__/utils.cpython-311.pyc differ
diff --git a/litellm/main.py b/litellm/main.py
index 7d39afab4..98033a66e 100644
--- a/litellm/main.py
+++ b/litellm/main.py
@@ -162,6 +162,7 @@ def completion(
         ): # allow custom provider to be passed in via the model name "azure/chatgpt-test"
             custom_llm_provider = model.split("/", 1)[0]
             model = model.split("/", 1)[1]
+        model, custom_llm_provider = get_llm_provider(model=model, custom_llm_provider=custom_llm_provider)
         # check if user passed in any of the OpenAI optional params
         optional_params = get_optional_params(
             functions=functions,
@@ -199,7 +200,6 @@ def completion(
             completion_call_id=id
         )
         logging.update_environment_variables(model=model, user=user, optional_params=optional_params, litellm_params=litellm_params)
-        model, custom_llm_provider = get_llm_provider(model=model, custom_llm_provider=custom_llm_provider)
         if custom_llm_provider == "azure":
             # azure configs
             api_type = get_secret("AZURE_API_TYPE") or "azure"
@@ -281,7 +281,6 @@ def completion(
                 litellm.openai_key or
                 get_secret("OPENAI_API_KEY")
             )
-
             ## LOGGING
             logging.pre_call(
                 input=messages,
@@ -375,7 +374,7 @@ def completion(
                 **optional_params
             )
             if "stream" in optional_params and optional_params["stream"] == True:
-                response = CustomStreamWrapper(response, model, logging_obj=logging)
+                response = CustomStreamWrapper(response, model, custom_llm_provider="openai", logging_obj=logging)
                 return response
             ## LOGGING
             logging.post_call(
diff --git a/litellm/tests/__pycache__/test_bad_params.cpython-311-pytest-7.4.0.pyc b/litellm/tests/__pycache__/test_bad_params.cpython-311-pytest-7.4.0.pyc
deleted file mode 100644
index 753da9901..000000000
Binary files a/litellm/tests/__pycache__/test_bad_params.cpython-311-pytest-7.4.0.pyc and /dev/null differ
diff --git a/litellm/tests/__pycache__/test_client.cpython-311-pytest-7.4.0.pyc b/litellm/tests/__pycache__/test_client.cpython-311-pytest-7.4.0.pyc
deleted file mode 100644
index 33522b54a..000000000
Binary files a/litellm/tests/__pycache__/test_client.cpython-311-pytest-7.4.0.pyc and /dev/null differ
diff --git a/litellm/tests/__pycache__/test_completion.cpython-311-pytest-7.4.0.pyc b/litellm/tests/__pycache__/test_completion.cpython-311-pytest-7.4.0.pyc
deleted file mode 100644
index 0c5fa166d..000000000
Binary files a/litellm/tests/__pycache__/test_completion.cpython-311-pytest-7.4.0.pyc and /dev/null differ
diff --git a/litellm/tests/__pycache__/test_exceptions.cpython-311-pytest-7.4.0.pyc b/litellm/tests/__pycache__/test_exceptions.cpython-311-pytest-7.4.0.pyc
deleted file mode 100644
index 62f9422f8..000000000
Binary files a/litellm/tests/__pycache__/test_exceptions.cpython-311-pytest-7.4.0.pyc and /dev/null differ
diff --git a/litellm/tests/__pycache__/test_logging.cpython-311-pytest-7.4.0.pyc b/litellm/tests/__pycache__/test_logging.cpython-311-pytest-7.4.0.pyc
deleted file mode 100644
index 9f71ef3a1..000000000
Binary files a/litellm/tests/__pycache__/test_logging.cpython-311-pytest-7.4.0.pyc and /dev/null differ
diff --git a/litellm/tests/__pycache__/test_model_fallback.cpython-311-pytest-7.4.0.pyc b/litellm/tests/__pycache__/test_model_fallback.cpython-311-pytest-7.4.0.pyc
deleted file mode 100644
index 864247d09..000000000
Binary files a/litellm/tests/__pycache__/test_model_fallback.cpython-311-pytest-7.4.0.pyc and /dev/null differ
diff --git a/litellm/tests/__pycache__/test_timeout.cpython-311-pytest-7.4.0.pyc b/litellm/tests/__pycache__/test_timeout.cpython-311-pytest-7.4.0.pyc
deleted file mode 100644
index f291f8a8d..000000000
Binary files a/litellm/tests/__pycache__/test_timeout.cpython-311-pytest-7.4.0.pyc and /dev/null differ
diff --git a/litellm/tests/test_streaming.py b/litellm/tests/test_streaming.py
index d29184e29..5e35fcb9d 100644
--- a/litellm/tests/test_streaming.py
+++ b/litellm/tests/test_streaming.py
@@ -698,4 +698,4 @@ def test_openai_streaming_and_function_calling():
         pytest.fail(f"Error occurred: {e}")
         raise e
 
-test_openai_streaming_and_function_calling()
+# test_openai_streaming_and_function_calling()
diff --git a/litellm/utils.py b/litellm/utils.py
index 5865557da..b81f88a09 100644
--- a/litellm/utils.py
+++ b/litellm/utils.py
@@ -889,7 +889,7 @@ def get_optional_params( # use the openai defaults
         optional_params["return_full_text"] = return_full_text
         optional_params["details"] = True
         optional_params["task"] = task
-    elif custom_llm_provider == "together_ai" or ("togethercomputer" in model):
+    elif custom_llm_provider == "together_ai":
         if stream:
             optional_params["stream_tokens"] = stream
         if temperature != 1:
@@ -2520,8 +2520,7 @@ class CustomStreamWrapper:
                 chunk = next(self.completion_stream)
                 completion_obj["content"] = chunk
             elif (
-                self.custom_llm_provider and self.custom_llm_provider == "together_ai"
-            ) or ("togethercomputer" in self.model):
+                self.custom_llm_provider and self.custom_llm_provider == "together_ai"):
                 chunk = next(self.completion_stream)
                 text_data = self.handle_together_ai_chunk(chunk)
                 if text_data == "":
diff --git a/proxy-server/.DS_Store b/proxy-server/.DS_Store
index 739982f14..7a42e831c 100644
Binary files a/proxy-server/.DS_Store and b/proxy-server/.DS_Store differ
diff --git a/pyproject.toml b/pyproject.toml
index e43e2416d..dcce77dae 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.681"
+version = "0.1.682"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"
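
Usage note (not part of the diff): a minimal sketch of the call pattern these changes affect, assuming litellm's public completion() API at this version; the Together AI model name and prompt below are illustrative, not taken from the patch. With the "togethercomputer" substring fallbacks removed from get_optional_params() and CustomStreamWrapper, provider routing now flows through the "provider/model" prefix and get_llm_provider(), which completion() resolves before building the optional params.

# Sketch only: model name and messages are assumptions for illustration.
import litellm

response = litellm.completion(
    model="together_ai/togethercomputer/llama-2-70b-chat",  # explicit together_ai/ prefix drives routing
    messages=[{"role": "user", "content": "Hello, how are you?"}],
    stream=True,  # for together_ai this sets stream_tokens in get_optional_params()
)

# Streaming responses come back as a CustomStreamWrapper, which is iterable.
for chunk in response:
    print(chunk)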