Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
Merge pull request #1969 from kan-bayashi/fix/support-multiple-tools-in-gemini
fix: fix the issues when using tools in gemini
Commit 851473b71a
2 changed files with 6 additions and 9 deletions
@@ -4288,18 +4288,15 @@ def get_optional_params(
         if tools is not None and isinstance(tools, list):
             from vertexai.preview import generative_models

-            gtools = []
+            gtool_func_declarations = []
             for tool in tools:
-                gtool = generative_models.FunctionDeclaration(
+                gtool_func_declaration = generative_models.FunctionDeclaration(
                     name=tool["function"]["name"],
                     description=tool["function"].get("description", ""),
                     parameters=tool["function"].get("parameters", {}),
                 )
-                gtool_func_declaration = generative_models.Tool(
-                    function_declarations=[gtool]
-                )
-                gtools.append(gtool_func_declaration)
-            optional_params["tools"] = gtools
+                gtool_func_declarations.append(gtool_func_declaration)
+            optional_params["tools"] = [generative_models.Tool(function_declarations=gtool_func_declarations)]
     elif custom_llm_provider == "sagemaker":
         ## check if unsupported param passed in
         supported_params = ["stream", "temperature", "max_tokens", "top_p", "stop", "n"]
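To make the effect of the change concrete, below is a minimal, self-contained sketch of the corrected conversion logic, assuming the vertexai SDK (google-cloud-aiplatform) is installed. The helper name openai_tools_to_gemini and the example tool definitions are illustrative, not part of litellm; the point is that every FunctionDeclaration now ends up inside a single generative_models.Tool rather than one Tool per function.

# Illustrative sketch (not litellm code): convert OpenAI-style tool specs
# into the single-Tool form used after the fix in the diff above.
from vertexai.preview import generative_models

def openai_tools_to_gemini(tools):
    # Build one FunctionDeclaration per OpenAI-style tool dict.
    gtool_func_declarations = []
    for tool in tools:
        gtool_func_declarations.append(
            generative_models.FunctionDeclaration(
                name=tool["function"]["name"],
                description=tool["function"].get("description", ""),
                parameters=tool["function"].get("parameters", {}),
            )
        )
    # Wrap all declarations in a single Tool; wrapping each declaration in
    # its own Tool (the old behavior) is what broke multi-tool requests.
    return [generative_models.Tool(function_declarations=gtool_func_declarations)]

# Hypothetical usage with two tool definitions:
tools = [
    {"function": {"name": "get_current_weather",
                  "description": "Get the weather for a city",
                  "parameters": {"type": "object",
                                 "properties": {"city": {"type": "string"}},
                                 "required": ["city"]}}},
    {"function": {"name": "get_current_time",
                  "description": "Get the local time for a city",
                  "parameters": {"type": "object",
                                 "properties": {"city": {"type": "string"}},
                                 "required": ["city"]}}},
]
optional_params = {"tools": openai_tools_to_gemini(tools)}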