add BaseResponsesAPIConfig

This commit is contained in:
Ishaan Jaff 2025-03-11 15:57:53 -07:00
parent 8dfd1dc136
commit f5e452556b
2 changed files with 25 additions and 26 deletions

View file

@@ -209,6 +209,7 @@ from litellm.llms.base_llm.image_variations.transformation import (
BaseImageVariationConfig,
)
from litellm.llms.base_llm.rerank.transformation import BaseRerankConfig
from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
from ._logging import _is_debugging_on, verbose_logger
from .caching.caching import (
@@ -5103,7 +5104,7 @@ def prompt_token_calculator(model, messages):
from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic
anthropic_obj = Anthropic()
num_tokens = anthropic_obj.count_tokens(text)
num_tokens = anthropic_obj.count_tokens(text) # type: ignore
else:
num_tokens = len(encoding.encode(text))
return num_tokens
@@ -6275,6 +6276,15 @@ class ProviderConfigManager:
return litellm.DeepgramAudioTranscriptionConfig()
return None
@staticmethod
def get_provider_responses_api_config(
    model: str,
    provider: LlmProviders,
) -> Optional[BaseResponsesAPIConfig]:
    """Return the Responses API transformation config for *provider*.

    Only OpenAI currently exposes a Responses API configuration; every
    other provider yields ``None``. The *model* argument is unused here
    but kept for signature symmetry with the sibling ``get_provider_*``
    lookups on this class.
    """
    if provider == litellm.LlmProviders.OPENAI:
        return litellm.OpenAIResponsesAPIConfig()
    return None
@staticmethod
def get_provider_text_completion_config(
model: str,