except custom openai proxy

Krrish Dholakia 2023-09-16 16:15:44 -07:00
parent 15bc5f2bdc
commit 122c993e6f
5 changed files with 6 additions and 8 deletions


@@ -162,6 +162,7 @@ def completion(
): # allow custom provider to be passed in via the model name "azure/chatgpt-test"
custom_llm_provider = model.split("/", 1)[0]
model = model.split("/", 1)[1]
+ model, custom_llm_provider = get_llm_provider(model=model, custom_llm_provider=custom_llm_provider)
# check if user passed in any of the OpenAI optional params
optional_params = get_optional_params(
functions=functions,
@@ -199,7 +200,6 @@ def completion(
completion_call_id=id
)
logging.update_environment_variables(model=model, user=user, optional_params=optional_params, litellm_params=litellm_params)
- model, custom_llm_provider = get_llm_provider(model=model, custom_llm_provider=custom_llm_provider)
if custom_llm_provider == "azure":
# azure configs
api_type = get_secret("AZURE_API_TYPE") or "azure"
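
Moving the get_llm_provider call above update_environment_variables resolves the provider before any logging happens, so the logged model/provider pair matches what the rest of completion() dispatches on. A minimal sketch of the provider-resolution idea; resolve_provider and its fallback are hypothetical simplifications, and the real litellm.get_llm_provider also infers providers from known model names:

# Hypothetical, simplified stand-in for litellm.get_llm_provider.
def resolve_provider(model, custom_llm_provider=None):
    provider_list = ["openai", "azure", "together_ai"]  # assumption: subset of litellm.provider_list
    if custom_llm_provider:  # caller already knows the provider
        return model, custom_llm_provider
    if "/" in model and model.split("/", 1)[0] in provider_list:
        prefix, rest = model.split("/", 1)  # e.g. "azure/chatgpt-test"
        return rest, prefix
    return model, "openai"  # assumption: default to openai

print(resolve_provider("azure/chatgpt-test"))  # ('chatgpt-test', 'azure')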
@@ -281,7 +281,6 @@ def completion(
litellm.openai_key or
get_secret("OPENAI_API_KEY")
)
## LOGGING
logging.pre_call(
input=messages,
@@ -375,7 +374,7 @@ def completion(
**optional_params
)
if "stream" in optional_params and optional_params["stream"] == True:
- response = CustomStreamWrapper(response, model, logging_obj=logging)
+ response = CustomStreamWrapper(response, model, custom_llm_provider="openai", logging_obj=logging)
return response
## LOGGING
logging.post_call(
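
Passing custom_llm_provider="openai" lets the stream wrapper pick its chunk handler from the provider flag instead of sniffing the model name. A rough sketch of that dispatch pattern; the class, method names, and chunk shapes here are assumptions, not litellm's exact implementation:

# Sketch of provider-keyed chunk handling; chunk shapes are assumed.
class StreamWrapperSketch:
    def __init__(self, completion_stream, model, custom_llm_provider=None):
        self.completion_stream = completion_stream
        self.model = model
        self.custom_llm_provider = custom_llm_provider

    def __iter__(self):
        return self

    def __next__(self):
        chunk = next(self.completion_stream)
        if self.custom_llm_provider == "openai":
            # OpenAI-style streaming delta chunks
            return chunk["choices"][0]["delta"].get("content", "")
        # other providers get their own handlers in the real wrapper
        return chunk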


@@ -698,4 +698,4 @@ def test_openai_streaming_and_function_calling():
pytest.fail(f"Error occurred: {e}")
raise e
- test_openai_streaming_and_function_calling()
+ # test_openai_streaming_and_function_calling()
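
Commenting out the module-level call stops the test from firing on import; pytest still collects and runs it by name. If an ad-hoc entry point is wanted, a guarded call is one option (a sketch, not what the litellm test file does):

if __name__ == "__main__":
    test_openai_streaming_and_function_calling()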


@@ -889,7 +889,7 @@ def get_optional_params( # use the openai defaults
optional_params["return_full_text"] = return_full_text
optional_params["details"] = True
optional_params["task"] = task
- elif custom_llm_provider == "together_ai" or ("togethercomputer" in model):
+ elif custom_llm_provider == "together_ai":
if stream:
optional_params["stream_tokens"] = stream
if temperature != 1:
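
With the "togethercomputer" in model substring check gone, the together_ai branch is taken only when the provider is explicit, e.g. via a model prefix that get_llm_provider strips off. Assumed usage, with an illustrative model name:

import litellm

# The "together_ai/" prefix makes the provider explicit, so
# get_optional_params sets stream_tokens instead of stream.
response = litellm.completion(
    model="together_ai/togethercomputer/llama-2-70b-chat",
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
    temperature=0.7,
)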
@@ -2520,8 +2520,7 @@ class CustomStreamWrapper:
chunk = next(self.completion_stream)
completion_obj["content"] = chunk
elif (
- self.custom_llm_provider and self.custom_llm_provider == "together_ai"
- ) or ("togethercomputer" in self.model):
+ self.custom_llm_provider and self.custom_llm_provider == "together_ai"):
chunk = next(self.completion_stream)
text_data = self.handle_together_ai_chunk(chunk)
if text_data == "":
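
handle_together_ai_chunk itself is unchanged here; for reference, a sketch of what parsing such a streamed chunk could look like, assuming SSE-style "data: {...}" lines carrying a choices[0].text field (the wire format is an assumption, not taken from this diff):

import json

def handle_together_ai_chunk_sketch(chunk):
    # Assumed SSE-style line: b'data: {"choices": [{"text": "..."}]}'
    line = chunk.decode("utf-8").strip() if isinstance(chunk, bytes) else chunk.strip()
    if not line.startswith("data:"):
        return ""
    payload = line[len("data:"):].strip()
    if payload == "[DONE]":
        return ""
    return json.loads(payload)["choices"][0]["text"]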

BIN proxy-server/.DS_Store (vendored)

Binary file not shown.


@@ -1,6 +1,6 @@
[tool.poetry]
name = "litellm"
version = "0.1.681"
version = "0.1.682"
description = "Library to easily interface with LLM API providers"
authors = ["BerriAI"]
license = "MIT License"