Mirror of https://github.com/BerriAI/litellm.git
Synced 2025-04-26 03:04:13 +00:00

fixes to test

parent 4e2b04a1e0
commit e8b9b4f68b

4 changed files with 78 additions and 24 deletions
@@ -10,6 +10,9 @@ from litellm.constants import request_timeout
 from litellm.litellm_core_utils.litellm_logging import Logging as LiteLLMLoggingObj
 from litellm.llms.base_llm.responses.transformation import BaseResponsesAPIConfig
 from litellm.llms.custom_httpx.llm_http_handler import BaseLLMHTTPHandler
+from litellm.responses.litellm_completion_transformation.handler import (
+    LiteLLMCompletionTransformationHandler,
+)
 from litellm.responses.utils import ResponsesAPIRequestUtils
 from litellm.types.llms.openai import (
     Reasoning,
@@ -29,6 +32,7 @@ from .streaming_iterator import BaseResponsesAPIStreamingIterator
 ####### ENVIRONMENT VARIABLES ###################
 # Initialize any necessary instances or variables here
 base_llm_http_handler = BaseLLMHTTPHandler()
+litellm_completion_transformation_handler = LiteLLMCompletionTransformationHandler()
 #################################################

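Note: the new module-level litellm_completion_transformation_handler mirrors the existing base_llm_http_handler singleton and, as the hunks below show, serves as a fallback when a provider has no native Responses API config. A minimal, hypothetical sketch of that bridge idea follows; the names here are illustrative only, not litellm's actual internals.

from typing import Any, Dict, List, Union


def _chat_completion_stub(model: str, messages: List[Dict[str, str]]) -> Dict[str, Any]:
    # Stand-in for a chat-completions call (e.g. litellm.completion); just echoes the prompt.
    return {"choices": [{"message": {"content": f"echo from {model}: {messages[-1]['content']}"}}]}


class CompletionTransformationBridge:
    """Translate a Responses-API-style request into a chat-completions call and back."""

    def response_api_handler(self, model: str, input: Union[str, list], **kwargs: Any) -> Dict[str, Any]:
        # 1. Convert the Responses API `input` into chat messages.
        messages = [{"role": "user", "content": input if isinstance(input, str) else str(input)}]
        # 2. Delegate to the chat-completions path.
        completion = _chat_completion_stub(model=model, messages=messages)
        # 3. Map the completion result back into a Responses-API-shaped payload.
        text = completion["choices"][0]["message"]["content"]
        return {"output": [{"type": "message", "content": text}]}


bridge = CompletionTransformationBridge()
print(bridge.response_api_handler(model="some-model", input="hello"))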
@@ -178,19 +182,12 @@ def responses(
     )

     # get provider config
-    responses_api_provider_config: Optional[
-        BaseResponsesAPIConfig
-    ] = ProviderConfigManager.get_provider_responses_api_config(
-        model=model,
-        provider=litellm.LlmProviders(custom_llm_provider),
-    )
-
-    if responses_api_provider_config is None:
-        raise litellm.BadRequestError(
-            model=model,
-            llm_provider=custom_llm_provider,
-            message=f"Responses API not available for custom_llm_provider={custom_llm_provider}, model: {model}",
-        )
+    responses_api_provider_config: Optional[BaseResponsesAPIConfig] = (
+        ProviderConfigManager.get_provider_responses_api_config(
+            model=model,
+            provider=litellm.LlmProviders(custom_llm_provider),
+        )
+    )

     local_vars.update(kwargs)
     # Get ResponsesAPIOptionalRequestParams with only valid parameters
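The lookup is reshaped above, but the more important change is in error handling: get_provider_responses_api_config can return None, and the immediate litellm.BadRequestError is gone; the None case is handled further down (next hunk) by falling back to the transformation handler. A small self-contained sketch of that "return None and let the caller choose a fallback" pattern, using hypothetical names:

from typing import Dict, Optional


class ResponsesConfig:
    """Placeholder for a provider-specific Responses API config."""

    def __init__(self, provider: str) -> None:
        self.provider = provider


# Hypothetical registry: only some providers ship a native Responses API config.
_NATIVE_CONFIGS: Dict[str, ResponsesConfig] = {"openai": ResponsesConfig("openai")}


def get_provider_responses_api_config(provider: str) -> Optional[ResponsesConfig]:
    # Return None for unsupported providers instead of raising.
    return _NATIVE_CONFIGS.get(provider)


def handle_request(provider: str) -> str:
    config = get_provider_responses_api_config(provider)
    if config is None:
        # The caller picks the fallback path (here: bridge through chat completions).
        return f"{provider}: bridged through chat completions"
    return f"{provider}: native Responses API"


print(handle_request("openai"))     # -> openai: native Responses API
print(handle_request("anthropic"))  # -> anthropic: bridged through chat completions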
@@ -200,6 +197,16 @@ def responses(
         )
     )

+    if responses_api_provider_config is None:
+        return litellm_completion_transformation_handler.response_api_handler(
+            model=model,
+            input=input,
+            responses_api_request=ResponsesAPIOptionalRequestParams(),
+            custom_llm_provider=custom_llm_provider,
+            _is_async=_is_async,
+            **kwargs,
+        )
+
     # Get optional parameters for the responses API
     responses_api_request_params: Dict = (
         ResponsesAPIRequestUtils.get_optional_params_responses_api(
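Caller-facing effect, roughly: requesting the Responses API for a provider without a native Responses API config no longer raises litellm.BadRequestError; the call is served through the completion transformation handler instead. A minimal usage sketch, assuming the model below has no provider-native Responses API config (the model name is illustrative):

import litellm

# Before this commit this path raised BadRequestError ("Responses API not available
# for custom_llm_provider=..."); now it is bridged through chat completions.
resp = litellm.responses(
    model="anthropic/claude-3-5-sonnet-20240620",  # assumption: no native Responses API config
    input="Write a one-line haiku about passing tests.",
)
print(resp)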