petals: remove print statement

This commit is contained in:
ishaan-jaff 2023-09-19 10:56:30 -07:00
parent 94a19fa6c2
commit 85862c1066
2 changed files with 11 additions and 3 deletions

View file

@ -1041,6 +1041,14 @@ def get_optional_params( # use the openai defaults
optional_params["remove_input"] = True
if stop != None:
optional_params["stop_sequences"] = stop
elif model in litellm.petals_models or custom_llm_provider == "petals":
# max_new_tokens=1,temperature=0.9, top_p=0.6
if max_tokens != float("inf"):
optional_params["max_new_tokens"] = max_tokens
if temperature != 1:
optional_params["temperature"] = temperature
if top_p != 1:
optional_params["top_p"] = top_p
else: # assume passing in params for openai/azure openai
if functions != []:
optional_params["functions"] = functions