Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 18:54:30 +00:00)
Merge pull request #1969 from kan-bayashi/fix/support-multiple-tools-in-gemini
fix: fix the issues when using tools in gemini
Commit 851473b71a
2 changed files with 6 additions and 9 deletions
@@ -439,8 +439,8 @@ def completion(
             tools=tools,
         )

-        if tools is not None and hasattr(
-            response.candidates[0].content.parts[0], "function_call"
+        if tools is not None and bool(
+            getattr(response.candidates[0].content.parts[0], "function_call", None)
         ):
             function_call = response.candidates[0].content.parts[0].function_call
             args_dict = {}
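The first hunk tightens tool-call detection on the Gemini response. On a proto-backed response Part, hasattr(part, "function_call") is typically True even when the model returned plain text, because the field is defined on the generated message class; the old check therefore misclassified ordinary completions as tool calls. The new check relies on the truthiness of the field instead. A minimal stand-alone sketch of the pattern; FakePart and FakeFunctionCall are illustrative stand-ins, not the real SDK types:

# Illustrative stand-in for a proto-backed response part: the attribute always
# exists, but it is "empty" (falsy) when the model did not call a function.
class FakeFunctionCall:
    def __init__(self, name=""):
        self.name = name

    def __bool__(self):
        # Empty proto messages evaluate to False, populated ones to True.
        return bool(self.name)


class FakePart:
    def __init__(self, function_call=None):
        # The field is always present, mirroring generated proto classes.
        self.function_call = function_call or FakeFunctionCall()


plain_text_part = FakePart()
tool_call_part = FakePart(FakeFunctionCall(name="get_weather"))

# Old check: always True, so plain-text responses were treated as tool calls.
assert hasattr(plain_text_part, "function_call") is True

# New check: only True when the function_call field is actually populated.
assert bool(getattr(plain_text_part, "function_call", None)) is False
assert bool(getattr(tool_call_part, "function_call", None)) is True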
@@ -4288,18 +4288,15 @@ def get_optional_params(
         if tools is not None and isinstance(tools, list):
             from vertexai.preview import generative_models

-            gtools = []
+            gtool_func_declarations = []
             for tool in tools:
-                gtool = generative_models.FunctionDeclaration(
+                gtool_func_declaration = generative_models.FunctionDeclaration(
                     name=tool["function"]["name"],
                     description=tool["function"].get("description", ""),
                     parameters=tool["function"].get("parameters", {}),
                 )
-                gtool_func_declaration = generative_models.Tool(
-                    function_declarations=[gtool]
-                )
-                gtools.append(gtool_func_declaration)
-            optional_params["tools"] = gtools
+                gtool_func_declarations.append(gtool_func_declaration)
+            optional_params["tools"] = [generative_models.Tool(function_declarations=gtool_func_declarations)]
     elif custom_llm_provider == "sagemaker":
         ## check if unsupported param passed in
         supported_params = ["stream", "temperature", "max_tokens", "top_p", "stop", "n"]
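The second hunk changes how OpenAI-style tool definitions are translated for Vertex AI: previously each function was wrapped in its own generative_models.Tool, which broke requests carrying more than one tool; now every FunctionDeclaration is collected into a single Tool. A short sketch of the resulting translation, using made-up tool definitions and the same vertexai preview API calls that appear in the diff (requires google-cloud-aiplatform to run):

# Illustrative OpenAI-style tool specs, as a caller would pass them to litellm.
tools = [
    {
        "type": "function",
        "function": {
            "name": "get_current_weather",
            "description": "Get the current weather in a given location",
            "parameters": {
                "type": "object",
                "properties": {"location": {"type": "string"}},
                "required": ["location"],
            },
        },
    },
    {
        "type": "function",
        "function": {
            "name": "get_time",
            "description": "Get the current time for a timezone",
            "parameters": {
                "type": "object",
                "properties": {"timezone": {"type": "string"}},
                "required": ["timezone"],
            },
        },
    },
]

from vertexai.preview import generative_models

# Mirrors the rewritten loop: one FunctionDeclaration per tool, then all of
# them wrapped in a single Tool, instead of one Tool per function as before.
declarations = [
    generative_models.FunctionDeclaration(
        name=tool["function"]["name"],
        description=tool["function"].get("description", ""),
        parameters=tool["function"].get("parameters", {}),
    )
    for tool in tools
]
vertex_tools = [generative_models.Tool(function_declarations=declarations)]

As the PR title and the rewritten loop suggest, this is what lets a single Gemini request carry multiple tools: the provider receives one Tool object holding all function declarations rather than a list of single-function Tools.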
|
|