Mirror of https://github.com/BerriAI/litellm.git
fix(vertex_httpx.py): support passing response_schema to gemini
commit e73e9e12bc (parent e1f84b1bd9)
3 changed files with 58 additions and 2 deletions
vertex_httpx.py (excerpt):

@@ -356,6 +356,7 @@ class VertexGeminiConfig:
         model: str,
         non_default_params: dict,
         optional_params: dict,
         drop_params: bool,
     ):
         for param, value in non_default_params.items():
             if param == "temperature":
@@ -375,8 +376,13 @@ class VertexGeminiConfig:
             optional_params["stop_sequences"] = value
             if param == "max_tokens":
                 optional_params["max_output_tokens"] = value
-            if param == "response_format" and value["type"] == "json_object":  # type: ignore
-                optional_params["response_mime_type"] = "application/json"
+            if param == "response_format" and isinstance(value, dict):  # type: ignore
+                if value["type"] == "json_object":
+                    optional_params["response_mime_type"] = "application/json"
+                elif value["type"] == "text":
+                    optional_params["response_mime_type"] = "text/plain"
+                if "response_schema" in value:
+                    optional_params["response_schema"] = value["response_schema"]
             if param == "frequency_penalty":
                 optional_params["frequency_penalty"] = value
             if param == "presence_penalty":
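For readers who want to see the new behavior in isolation, below is a minimal, self-contained sketch of the response_format mapping added by this commit. The function name `map_response_format` and the example schema are illustrative only and are not part of litellm's API; the branch logic mirrors the added lines in the second hunk.

```python
# Illustrative sketch of the response_format mapping added in this commit.
# `map_response_format` is a made-up name for demonstration; the branch
# logic mirrors the "+" lines in the hunk above.


def map_response_format(value: dict, optional_params: dict) -> dict:
    if value["type"] == "json_object":
        optional_params["response_mime_type"] = "application/json"
    elif value["type"] == "text":
        optional_params["response_mime_type"] = "text/plain"
    # New in this commit: forward an OpenAI-style response_schema to Gemini.
    if "response_schema" in value:
        optional_params["response_schema"] = value["response_schema"]
    return optional_params


# Example OpenAI-style response_format carrying a JSON schema (illustrative payload).
response_format = {
    "type": "json_object",
    "response_schema": {
        "type": "object",
        "properties": {"recipe_name": {"type": "string"}},
        "required": ["recipe_name"],
    },
}

print(map_response_format(response_format, {}))
# {'response_mime_type': 'application/json', 'response_schema': {...}}
```

Before this change, only the top-level "json_object" check ran, so a response_schema supplied by the caller was not forwarded to Gemini; the new isinstance(value, dict) guard also lets a plain "text" format map to "text/plain".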