fix(utils.py): route Together AI calls through the OpenAI client

Together AI is now OpenAI-compatible.

Reference: https://docs.together.ai/docs/openai-api-compatibility
This commit is contained in:
Krrish Dholakia 2024-02-03 19:22:48 -08:00
parent c2f674ebe0
commit c49c88c8e5
6 changed files with 23 additions and 4 deletions

View file

@ -285,6 +285,7 @@ openai_compatible_endpoints: List = [
"api.endpoints.anyscale.com/v1",
"api.deepinfra.com/v1/openai",
"api.mistral.ai/v1",
"api.together.xyz/v1",
]
# this is maintained for Exception Mapping
@ -294,6 +295,7 @@ openai_compatible_providers: List = [
"deepinfra",
"perplexity",
"xinference",
"together_ai",
]

View file

@ -440,8 +440,8 @@ class OpenAIChatCompletion(BaseLLM):
input=data["messages"],
api_key=api_key,
additional_args={
"headers": headers,
"api_base": api_base,
"headers": {"Authorization": f"Bearer {openai_client.api_key}"},
"api_base": openai_client._base_url._uri_reference,
"acompletion": False,
"complete_input_dict": data,
},

View file

@ -1,3 +1,7 @@
"""
Deprecated. We now do together ai calls via the openai client.
Reference: https://docs.together.ai/docs/openai-api-compatibility
"""
import os, types
import json
from enum import Enum

View file

@ -791,6 +791,7 @@ def completion(
or custom_llm_provider == "anyscale"
or custom_llm_provider == "mistral"
or custom_llm_provider == "openai"
or custom_llm_provider == "together_ai"
or "ft:gpt-3.5-turbo" in model # finetune gpt-3.5-turbo
): # allow user to make an openai call with a custom base
# note: if a user sets a custom base - we should ensure this works
@ -1330,6 +1331,9 @@ def completion(
or ("togethercomputer" in model)
or (model in litellm.together_ai_models)
):
"""
Deprecated. We now do together ai calls via the openai client - https://docs.together.ai/docs/openai-api-compatibility
"""
custom_llm_provider = "together_ai"
together_ai_key = (
api_key

View file

@ -1994,11 +1994,12 @@ def test_completion_palm_stream():
def test_completion_together_ai_stream():
litellm.set_verbose = True
user_message = "Write 1pg about YC & litellm"
messages = [{"content": user_message, "role": "user"}]
try:
response = completion(
model="together_ai/mistralai/Mistral-7B-Instruct-v0.1",
model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
messages=messages,
stream=True,
max_tokens=5,

View file

@ -863,6 +863,7 @@ class Logging:
curl_command += additional_args.get("request_str", None)
elif api_base == "":
curl_command = self.model_call_details
print_verbose(f"\033[92m{curl_command}\033[0m\n")
verbose_logger.info(f"\033[92m{curl_command}\033[0m\n")
if self.logger_fn and callable(self.logger_fn):
try:
@ -4043,7 +4044,7 @@ def get_optional_params(
_check_valid_arg(supported_params=supported_params)
if stream:
optional_params["stream_tokens"] = stream
optional_params["stream"] = stream
if temperature is not None:
optional_params["temperature"] = temperature
if top_p is not None:
@ -4677,6 +4678,13 @@ def get_llm_provider(
# voyage is openai compatible, we just need to set this to custom_openai and have the api_base be https://api.voyageai.com/v1
api_base = "https://api.voyageai.com/v1"
dynamic_api_key = get_secret("VOYAGE_API_KEY")
elif custom_llm_provider == "together_ai":
api_base = "https://api.together.xyz/v1"
dynamic_api_key = (
get_secret("TOGETHER_API_KEY")
or get_secret("TOGETHER_AI_API_KEY")
or get_secret("TOGETHERAI_API_KEY")
)
return model, custom_llm_provider, dynamic_api_key, api_base
elif model.split("/", 1)[0] in litellm.provider_list:
custom_llm_provider = model.split("/", 1)[0]