forked from phoenix/litellm-mirror
fix(utils.py): clean up 'additionalProperties=False' for tool calling with zod
Fixes an issue where zod-generated tool schemas include additionalProperties=False, causing Vertex AI / Gemini calls to fail
This commit is contained in:
parent
25a0066123
commit
20e39d6acc
6 changed files with 27 additions and 4 deletions
|
@ -1799,7 +1799,6 @@ def set_callbacks(callback_list, function_id=None):
|
|||
|
||||
try:
|
||||
for callback in callback_list:
|
||||
print_verbose(f"init callback list: {callback}")
|
||||
if callback == "sentry":
|
||||
try:
|
||||
import sentry_sdk
|
||||
|
|
|
@ -2,8 +2,9 @@ model_list:
|
|||
- model_name: tts
|
||||
litellm_params:
|
||||
model: "openai/*"
|
||||
litellm_settings:
|
||||
success_callback: ["langfuse"]
|
||||
- model_name: gemini-1.5-flash
|
||||
litellm_params:
|
||||
model: gemini/gemini-1.5-flash
|
||||
|
||||
general_settings:
|
||||
alerting: ["slack"]
|
||||
|
|
|
@ -176,6 +176,7 @@ async def add_litellm_data_to_request(
|
|||
|
||||
def _add_otel_traceparent_to_data(data: dict, request: Request):
|
||||
from litellm.proxy.proxy_server import open_telemetry_logger
|
||||
|
||||
if data is None:
|
||||
return
|
||||
if open_telemetry_logger is None:
|
||||
|
|
|
@ -2720,6 +2720,10 @@ async def chat_completion(
|
|||
except:
|
||||
data = json.loads(body_str)
|
||||
|
||||
verbose_proxy_logger.debug(
|
||||
"Request received by LiteLLM:\n{}".format(json.dumps(data, indent=4)),
|
||||
)
|
||||
|
||||
data = await add_litellm_data_to_request(
|
||||
data=data,
|
||||
request=request,
|
||||
|
|
|
@ -353,7 +353,7 @@ class ProxyLogging:
|
|||
raise HTTPException(
|
||||
status_code=400, detail={"error": response}
|
||||
)
|
||||
print_verbose(f"final data being sent to {call_type} call: {data}")
|
||||
|
||||
return data
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
|
|
@ -2555,6 +2555,24 @@ def get_optional_params(
|
|||
message=f"Function calling is not supported by {custom_llm_provider}.",
|
||||
)
|
||||
|
||||
if "tools" in non_default_params:
|
||||
tools = non_default_params["tools"]
|
||||
for (
|
||||
tool
|
||||
) in (
|
||||
tools
|
||||
): # clean out 'additionalProperties = False'. Causes vertexai/gemini OpenAI API Schema errors - https://github.com/langchain-ai/langchainjs/issues/5240
|
||||
tool_function = tool.get("function", {})
|
||||
parameters = tool_function.get("parameters", None)
|
||||
if parameters is not None:
|
||||
new_parameters = copy.deepcopy(parameters)
|
||||
if (
|
||||
"additionalProperties" in new_parameters
|
||||
and new_parameters["additionalProperties"] is False
|
||||
):
|
||||
new_parameters.pop("additionalProperties", None)
|
||||
tool_function["parameters"] = new_parameters
|
||||
|
||||
def _check_valid_arg(supported_params):
|
||||
verbose_logger.debug(
|
||||
f"\nLiteLLM completion() model= {model}; provider = {custom_llm_provider}"
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue