mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-24 18:24:20 +00:00)
fix(utils.py): map optional params for gemini
parent 79264b0dab
commit 3469b5b911
2 changed files with 6 additions and 3 deletions
@@ -25,7 +25,7 @@ def generate_text():
                 ]
             }
         ]
-        response = litellm.completion(model="gemini/gemini-pro-vision", messages=messages)
+        response = litellm.completion(model="gemini/gemini-pro-vision", messages=messages, stop="Hello world")
         print(response)
         assert isinstance(response.choices[0].message.content, str) == True
     except Exception as exception:
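The first hunk updates the Gemini test so that it exercises the new stop handling end to end. For context, a minimal usage sketch of the same call pattern (assuming a valid Gemini API key is configured; the model and prompt here are illustrative, not taken from the test):

import litellm

# `stop` may be a bare string, as in the test above, or a list of strings;
# the utils.py change below normalizes both forms for Gemini.
response = litellm.completion(
    model="gemini/gemini-pro",
    messages=[{"role": "user", "content": "Say hello"}],
    stop="Hello world",
)
print(response.choices[0].message.content)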
@@ -3435,7 +3435,7 @@ def get_optional_params(
         if presence_penalty is not None:
             optional_params["presencePenalty"] = {"scale": presence_penalty}
     elif (
-        custom_llm_provider == "palm"
+        custom_llm_provider == "palm" or custom_llm_provider == "gemini"
     ):  # https://developers.generativeai.google/tutorials/curl_quickstart
         ## check if unsupported param passed in
         supported_params = ["temperature", "top_p", "stream", "n", "stop", "max_tokens"]
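This hunk routes gemini through the existing palm branch, whose first step is to reject any parameter outside supported_params. That guard can be pictured as a simple membership check; a hedged sketch with a hypothetical helper name, not LiteLLM's internals:

def check_supported_params(passed_params: dict, supported_params: list):
    # Reject any explicitly passed param the provider cannot accept.
    unsupported = [k for k in passed_params if k not in supported_params]
    if unsupported:
        raise ValueError(f"Params not supported by this provider: {unsupported}")

check_supported_params(
    {"temperature": 0.7, "stop": ["x"]},
    ["temperature", "top_p", "stream", "n", "stop", "max_tokens"],
)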
@@ -3450,7 +3450,10 @@ def get_optional_params(
         if n is not None:
             optional_params["candidate_count"] = n
         if stop is not None:
-            optional_params["stop_sequences"] = stop
+            if isinstance(stop, str):
+                optional_params["stop_sequences"] = [stop]
+            elif isinstance(stop, list):
+                optional_params["stop_sequences"] = stop
         if max_tokens is not None:
             optional_params["max_output_tokens"] = max_tokens
     elif custom_llm_provider == "vertex_ai":
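To see the fix in isolation: Gemini expects stop_sequences as a list, while callers may pass stop as either a string or a list. A minimal standalone sketch of the new mapping (map_gemini_optional_params is a hypothetical helper for illustration, not LiteLLM's API):

def map_gemini_optional_params(stop=None, n=None, max_tokens=None):
    # Mirrors the branch above: wrap a bare stop string in a list,
    # pass a list through unchanged.
    optional_params = {}
    if n is not None:
        optional_params["candidate_count"] = n
    if stop is not None:
        if isinstance(stop, str):
            optional_params["stop_sequences"] = [stop]
        elif isinstance(stop, list):
            optional_params["stop_sequences"] = stop
    if max_tokens is not None:
        optional_params["max_output_tokens"] = max_tokens
    return optional_params

print(map_gemini_optional_params(stop="Hello world", max_tokens=10))
# {'stop_sequences': ['Hello world'], 'max_output_tokens': 10}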