Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
petals: remove print statement
This commit is contained in:
parent
94a19fa6c2
commit
85862c1066
2 changed files with 11 additions and 3 deletions
|
@ -1041,6 +1041,14 @@ def get_optional_params( # use the openai defaults
|
|||
optional_params["remove_input"] = True
|
||||
if stop != None:
|
||||
optional_params["stop_sequences"] = stop
|
||||
elif model in litellm.petals_models or custom_llm_provider == "petals":
|
||||
# max_new_tokens=1,temperature=0.9, top_p=0.6
|
||||
if max_tokens != float("inf"):
|
||||
optional_params["max_new_tokens"] = max_tokens
|
||||
if temperature != 1:
|
||||
optional_params["temperature"] = temperature
|
||||
if top_p != 1:
|
||||
optional_params["top_p"] = top_p
|
||||
else: # assume passing in params for openai/azure openai
|
||||
if functions != []:
|
||||
optional_params["functions"] = functions
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue