petals remove print statement

ishaan-jaff 2023-09-19 10:56:30 -07:00
parent 2e9ff5b0bf
commit c32190f2da
2 changed files with 11 additions and 3 deletions

@@ -65,9 +65,9 @@ def completion(
         ## COMPLETION CALL
         inputs = tokenizer(prompt, return_tensors="pt")["input_ids"].cuda()
-        outputs = model_obj.generate(inputs, max_new_tokens=5)
-        print(outputs)
+        # optional params: max_new_tokens=1,temperature=0.9, top_p=0.6
+        outputs = model_obj.generate(inputs, **optional_params)
         ## LOGGING
         logging_obj.post_call(
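The effect of the hunk above: completion() no longer hardcodes max_new_tokens=5 or prints the raw tensor; it forwards whatever the caller mapped into optional_params. A minimal sketch of that call shape, assuming a Petals model and tokenizer are already loaded (petals_completion is a hypothetical helper for illustration, not litellm code):

    def petals_completion(model_obj, tokenizer, prompt, optional_params):
        # Tokenize on GPU, then pass the caller-supplied sampling params
        # (e.g. max_new_tokens, temperature, top_p) straight into generate()
        # instead of the previously hardcoded max_new_tokens=5.
        inputs = tokenizer(prompt, return_tensors="pt")["input_ids"].cuda()
        outputs = model_obj.generate(inputs, **optional_params)
        return tokenizer.decode(outputs[0], skip_special_tokens=True)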

@@ -1041,6 +1041,14 @@ def get_optional_params( # use the openai defaults
         optional_params["remove_input"] = True
         if stop != None:
             optional_params["stop_sequences"] = stop
+    elif model in litellm.petals_models or custom_llm_provider == "petals":
+        # max_new_tokens=1,temperature=0.9, top_p=0.6
+        if max_tokens != float("inf"):
+            optional_params["max_new_tokens"] = max_tokens
+        if temperature != 1:
+            optional_params["temperature"] = temperature
+        if top_p != 1:
+            optional_params["top_p"] = top_p
     else: # assume passing in params for openai/azure openai
         if functions != []:
             optional_params["functions"] = functions