mirror of
https://github.com/BerriAI/litellm.git
synced 2025-04-27 11:43:54 +00:00
add BaseResponsesAPIConfig
This commit is contained in:
parent
8dfd1dc136
commit
f5e452556b
2 changed files with 25 additions and 26 deletions
|
@@ -52,17 +52,6 @@ class BaseResponsesAPIConfig(ABC):
|
|||
|
||||
pass
|
||||
|
||||
@abstractmethod
def validate_environment(
    self,
    headers: dict,
    model: str,
    optional_params: dict,
    api_key: Optional[str] = None,
    api_base: Optional[str] = None,
) -> dict:
    """Validate provider credentials and build the request headers.

    Implementations are expected to merge auth material (e.g. the
    ``api_key``) into ``headers`` for the target provider and return
    the resulting header dict.
    """
    ...
|
||||
|
||||
def get_complete_url(
|
||||
self,
|
||||
api_base: Optional[str],
|
||||
|
@@ -81,18 +70,18 @@ class BaseResponsesAPIConfig(ABC):
|
|||
raise ValueError("api_base is required")
|
||||
return api_base
|
||||
|
||||
@abstractmethod
def transform_request(
    self,
    model: str,
    optional_params: dict,
    litellm_params: dict,
    headers: dict,
) -> dict:
    """Translate a litellm Responses API call into the provider's
    request payload.

    Abstract hook — concrete provider configs must override this and
    return the provider-specific request body as a dict.
    """
    ...
|
||||
# @abstractmethod
|
||||
# def transform_request(
|
||||
# self,
|
||||
# model: str,
|
||||
# optional_params: dict,
|
||||
# litellm_params: dict,
|
||||
# headers: dict,
|
||||
# ) -> dict:
|
||||
# pass
|
||||
|
||||
@abstractmethod
def transform_response(self):
    """Translate the provider's raw response back into litellm's
    Responses API shape.

    Abstract hook — signature is intentionally bare here; concrete
    provider configs supply the full parameter list.
    """
    ...
|
||||
# @abstractmethod
|
||||
# def transform_response(
|
||||
# self,
|
||||
# ):
|
||||
# pass
|
||||
|
|
|
@@ -209,6 +209,7 @@ from litellm.llms.base_llm.image_variations.transformation import (
|
|||
BaseImageVariationConfig,
|
||||
)
|
||||
from litellm.llms.base_llm.rerank.transformation import BaseRerankConfig
|
||||
from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
|
||||
|
||||
from ._logging import _is_debugging_on, verbose_logger
|
||||
from .caching.caching import (
|
||||
|
@@ -5103,7 +5104,7 @@ def prompt_token_calculator(model, messages):
|
|||
from anthropic import AI_PROMPT, HUMAN_PROMPT, Anthropic
|
||||
|
||||
anthropic_obj = Anthropic()
|
||||
num_tokens = anthropic_obj.count_tokens(text)
|
||||
num_tokens = anthropic_obj.count_tokens(text) # type: ignore
|
||||
else:
|
||||
num_tokens = len(encoding.encode(text))
|
||||
return num_tokens
|
||||
|
@@ -6275,6 +6276,15 @@ class ProviderConfigManager:
|
|||
return litellm.DeepgramAudioTranscriptionConfig()
|
||||
return None
|
||||
|
||||
@staticmethod
def get_provider_responses_api_config(
    model: str,
    provider: LlmProviders,
) -> Optional[BaseResponsesAPIConfig]:
    """Return the Responses API config object for *provider*.

    ``model`` is currently unused but kept for signature parity with the
    sibling ``get_provider_*_config`` helpers. Only OpenAI is supported;
    every other provider yields ``None``.
    """
    if provider == litellm.LlmProviders.OPENAI:
        return litellm.OpenAIResponsesAPIConfig()
    return None
|
||||
|
||||
@staticmethod
|
||||
def get_provider_text_completion_config(
|
||||
model: str,
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue