map finish reason

Krrish Dholakia 2023-09-13 19:22:38 -07:00
parent 1b346e0139
commit aaa57abddd
10 changed files with 64 additions and 56 deletions
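
The commit title refers to normalizing provider-specific finish reasons into the OpenAI-style values ("stop", "length", ...); the hunks shown below only cover the reshuffle inside get_optional_params, the finish-reason change itself lives in the other changed files. A minimal sketch of what such a normalization could look like — the mapping table and map_finish_reason helper names are illustrative, not lifted from this commit:

# Illustrative only: normalize provider finish reasons to OpenAI-style values.
# The keys below are finish reasons HuggingFace text-generation-inference
# commonly returns; the helper name is hypothetical.
PROVIDER_TO_OPENAI_FINISH_REASON = {
    "eos_token": "stop",       # model emitted its end-of-sequence token
    "stop_sequence": "stop",   # a user-supplied stop string was matched
    "length": "length",        # max_new_tokens was exhausted
}

def map_finish_reason(provider_reason: str) -> str:
    # Fall back to the raw provider value when no mapping is known.
    return PROVIDER_TO_OPENAI_FINISH_REASON.get(provider_reason, provider_reason)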


@@ -830,7 +830,23 @@ def get_optional_params( # use the openai defaults
optional_params["top_k"] = top_k
if stop != None:
optional_params["stop_sequences"] = stop
elif custom_llm_provider == "huggingface":
if temperature != 1:
optional_params["temperature"] = temperature
if top_p != 1:
optional_params["top_p"] = top_p
if n != 1:
optional_params["n"] = n
if stream:
optional_params["stream"] = stream
if stop != None:
optional_params["stop"] = stop
if max_tokens != float("inf"):
optional_params["max_new_tokens"] = max_tokens
if presence_penalty != 0:
optional_params["repetition_penalty"] = presence_penalty
optional_params["details"] = True
optional_params["task"] = task
elif custom_llm_provider == "together_ai" or ("togethercomputer" in model):
if stream:
optional_params["stream_tokens"] = stream
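
For readability, the new huggingface branch in the hunk above can be read as a standalone mapping from OpenAI-style arguments to text-generation-inference parameter names (max_tokens becomes max_new_tokens, presence_penalty stands in for repetition_penalty), skipping any value left at its OpenAI default. A sketch of that mapping under the same defaults the function uses — this helper does not exist in the codebase:

# Hypothetical standalone version of the huggingface branch above.
def huggingface_optional_params(temperature=1, top_p=1, n=1, stream=False,
                                stop=None, max_tokens=float("inf"),
                                presence_penalty=0, task=None):
    optional_params = {}
    if temperature != 1:
        optional_params["temperature"] = temperature
    if top_p != 1:
        optional_params["top_p"] = top_p
    if n != 1:
        optional_params["n"] = n
    if stream:
        optional_params["stream"] = stream
    if stop is not None:
        optional_params["stop"] = stop
    if max_tokens != float("inf"):
        # OpenAI's max_tokens maps to TGI's max_new_tokens
        optional_params["max_new_tokens"] = max_tokens
    if presence_penalty != 0:
        # closest TGI knob to presence_penalty
        optional_params["repetition_penalty"] = presence_penalty
    optional_params["details"] = True  # request token-level details (incl. finish_reason)
    optional_params["task"] = task
    return optional_params

# usage: huggingface_optional_params(temperature=0.7, stream=True, max_tokens=256)
# -> {"temperature": 0.7, "stream": True, "max_new_tokens": 256,
#     "details": True, "task": None}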
@@ -867,23 +883,6 @@ def get_optional_params( # use the openai defaults
optional_params["num_beams"] = num_beams
if max_tokens != float("inf"):
optional_params["max_new_tokens"] = max_tokens
elif custom_llm_provider == "huggingface":
if temperature != 1:
optional_params["temperature"] = temperature
if top_p != 1:
optional_params["top_p"] = top_p
if n != 1:
optional_params["n"] = n
if stream:
optional_params["stream"] = stream
if stop != None:
optional_params["stop"] = stop
if max_tokens != float("inf"):
optional_params["max_new_tokens"] = max_tokens
if presence_penalty != 0:
optional_params["repetition_penalty"] = presence_penalty
optional_params["details"] = True
optional_params["task"] = task
elif custom_llm_provider == "sagemaker":
if "llama-2" in model:
# llama-2 models on sagemaker support the following args
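
Net effect of the two hunks: the huggingface branch moves ahead of the together_ai check in get_optional_params rather than changing behavior. A hedged end-to-end sketch of how these parameters would reach that branch — the model id and prompt are placeholders, and the response field access assumes litellm's OpenAI-compatible response shape:

import litellm

# Placeholder model id and prompt; custom_llm_provider routes the call
# through the huggingface branch shown in the first hunk.
response = litellm.completion(
    model="bigcode/starcoder",
    messages=[{"role": "user", "content": "Say hi"}],
    custom_llm_provider="huggingface",
    temperature=0.7,
    max_tokens=64,      # forwarded as max_new_tokens
    stop=["\n\n"],      # forwarded as stop
)

# With the finish-reason mapping applied, this should be an OpenAI-style
# value such as "stop" or "length" rather than a provider-specific string.
print(response["choices"][0]["finish_reason"])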