Removed hard-coded supported-models list; determine function-calling support via `supports_function_calling(model, custom_llm_provider="together_ai")` instead

This commit is contained in:
Sunny Wan 2025-03-07 18:36:28 -05:00
parent aa9c231526
commit eafb376bc2

View file

@ -9,6 +9,7 @@ Docs: https://docs.together.ai/reference/completions-1
from typing import Optional
from litellm import get_model_info, verbose_logger
from litellm.utils import supports_function_calling
from ..openai.chat.gpt_transformation import OpenAIGPTConfig
@ -20,25 +21,12 @@ class TogetherAIConfig(OpenAIGPTConfig):
Docs: https://docs.together.ai/docs/json-mode
"""
supports_function_calling: Optional[bool] = None
supported_models = [
"deepseek-ai/DeepSeek-V3",
"meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo",
"meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo",
"meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo",
"meta-llama/Llama-3.3-70B-Instruct-Turbo",
"mistralai/Mixtral-8x7B-Instruct-v0.1",
"mistralai/Mistral-7B-Instruct-v0.1",
"Qwen/Qwen2.5-7B-Instruct-Turbo",
"Qwen/Qwen2.5-72B-Instruct-Turbo"
]
function_calling: Optional[bool] = supports_function_calling(model, custom_llm_provider = "together_ai")
supports_function_calling = model in supported_models
optional_params = super().get_supported_openai_params(model)
if supports_function_calling is not True:
if function_calling is not True:
verbose_logger.debug(
"Only some together models support function calling/response_format. Docs - https://docs.together.ai/docs/function-calling"
)