fix(utils.py): support get_secret("TOGETHER_AI_TOKEN")

Krrish Dholakia 2024-02-03 19:35:09 -08:00
parent c49c88c8e5
commit efb6123d28
2 changed files with 3 additions and 2 deletions


@@ -37,7 +37,7 @@ def test_completion_custom_provider_model_name():
     try:
         litellm.cache = None
         response = completion(
-            model="together_ai/mistralai/Mistral-7B-Instruct-v0.1",
+            model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
             messages=messages,
             logger_fn=logger_fn,
         )
@@ -1369,7 +1369,7 @@ def test_customprompt_together_ai():
     print(litellm.success_callback)
     print(litellm._async_success_callback)
     response = completion(
-        model="together_ai/mistralai/Mistral-7B-Instruct-v0.1",
+        model="together_ai/mistralai/Mixtral-8x7B-Instruct-v0.1",
         messages=messages,
         roles={
             "system": {


@@ -4684,6 +4684,7 @@ def get_llm_provider(
                 get_secret("TOGETHER_API_KEY")
                 or get_secret("TOGETHER_AI_API_KEY")
                 or get_secret("TOGETHERAI_API_KEY")
+                or get_secret("TOGETHER_AI_TOKEN")
             )
             return model, custom_llm_provider, dynamic_api_key, api_base
         elif model.split("/", 1)[0] in litellm.provider_list: