forked from phoenix/litellm-mirror
fix(utils.py): handle additionalProperties is False for vertex ai / gemini calls
Fixes https://github.com/BerriAI/litellm/issues/5338. Also adds Together AI JSON mode support.
This commit is contained in:
parent
874d58fe8a
commit
93ed8c7216
3 changed files with 69 additions and 2 deletions
|
@ -1,4 +1,4 @@
|
||||||
model_list:
|
model_list:
|
||||||
- model_name: "batch-gpt-4o-mini"
|
- model_name: "*"
|
||||||
litellm_params:
|
litellm_params:
|
||||||
model: "*"
|
model: "*"
|
||||||
|
|
|
@ -499,3 +499,40 @@ def test_vertex_safety_settings(provider):
|
||||||
model="gemini-1.5-pro", custom_llm_provider=provider
|
model="gemini-1.5-pro", custom_llm_provider=provider
|
||||||
)
|
)
|
||||||
assert len(optional_params) == 1
|
assert len(optional_params) == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_additional_properties_json_schema():
    """Ensure `additionalProperties: False` is stripped before being sent to vertex_ai_beta.

    Regression test for https://github.com/BerriAI/litellm/issues/5338 —
    Gemini rejects OpenAPI schemas that contain `additionalProperties: False`.
    """
    # Nested OpenAI-style json_schema with `additionalProperties: False`
    # at two levels (top level and inside the array item schema).
    math_reasoning_schema = {
        "type": "object",
        "properties": {
            "steps": {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "explanation": {"type": "string"},
                        "output": {"type": "string"},
                    },
                    "required": ["explanation", "output"],
                    "additionalProperties": False,
                },
            },
            "final_answer": {"type": "string"},
        },
        "required": ["steps", "final_answer"],
        "additionalProperties": False,
    }

    optional_params = get_optional_params(
        model="gemini-1.5-pro",
        custom_llm_provider="vertex_ai_beta",
        response_format={
            "type": "json_schema",
            "json_schema": {
                "name": "math_reasoning",
                "schema": math_reasoning_schema,
                "strict": True,
            },
        },
    )

    print(optional_params)
    assert "additionalProperties" not in optional_params["response_schema"]
|
||||||
|
|
|
@ -2687,6 +2687,24 @@ def get_optional_params_embeddings(
|
||||||
return final_params
|
return final_params
|
||||||
|
|
||||||
|
|
||||||
|
def _remove_additional_properties(schema):
|
||||||
|
if isinstance(schema, dict):
|
||||||
|
# Remove the 'additionalProperties' key if it exists and is set to False
|
||||||
|
if "additionalProperties" in schema and schema["additionalProperties"] is False:
|
||||||
|
del schema["additionalProperties"]
|
||||||
|
|
||||||
|
# Recursively process all dictionary values
|
||||||
|
for key, value in schema.items():
|
||||||
|
_remove_additional_properties(value)
|
||||||
|
|
||||||
|
elif isinstance(schema, list):
|
||||||
|
# Recursively process all items in the list
|
||||||
|
for item in schema:
|
||||||
|
_remove_additional_properties(item)
|
||||||
|
|
||||||
|
return schema
|
||||||
|
|
||||||
|
|
||||||
def get_optional_params(
|
def get_optional_params(
|
||||||
# use the openai defaults
|
# use the openai defaults
|
||||||
# https://platform.openai.com/docs/api-reference/chat/create
|
# https://platform.openai.com/docs/api-reference/chat/create
|
||||||
|
@ -2874,7 +2892,18 @@ def get_optional_params(
|
||||||
non_default_params["response_format"] = type_to_response_format_param(
|
non_default_params["response_format"] = type_to_response_format_param(
|
||||||
response_format=non_default_params["response_format"]
|
response_format=non_default_params["response_format"]
|
||||||
)
|
)
|
||||||
|
# # clean out 'additionalProperties = False'. Causes vertexai/gemini OpenAI API Schema errors - https://github.com/langchain-ai/langchainjs/issues/5240
|
||||||
|
if (
|
||||||
|
non_default_params["response_format"].get("json_schema", {}).get("schema")
|
||||||
|
is not None
|
||||||
|
):
|
||||||
|
old_schema = copy.deepcopy(
|
||||||
|
non_default_params["response_format"]
|
||||||
|
.get("json_schema", {})
|
||||||
|
.get("schema")
|
||||||
|
)
|
||||||
|
new_schema = _remove_additional_properties(schema=old_schema)
|
||||||
|
non_default_params["response_format"]["json_schema"]["schema"] = new_schema
|
||||||
if "tools" in non_default_params and isinstance(
|
if "tools" in non_default_params and isinstance(
|
||||||
non_default_params, list
|
non_default_params, list
|
||||||
): # fixes https://github.com/BerriAI/litellm/issues/4933
|
): # fixes https://github.com/BerriAI/litellm/issues/4933
|
||||||
|
@ -4303,6 +4332,7 @@ def get_supported_openai_params(
|
||||||
"frequency_penalty",
|
"frequency_penalty",
|
||||||
"tools",
|
"tools",
|
||||||
"tool_choice",
|
"tool_choice",
|
||||||
|
"response_format",
|
||||||
]
|
]
|
||||||
elif custom_llm_provider == "ai21":
|
elif custom_llm_provider == "ai21":
|
||||||
return [
|
return [
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue