(fix) HF: fix this error: Failed: Error occurred: HuggingfaceException - Input validation error: temperature must be strictly positive

This commit is contained in:
ishaan-jaff 2023-11-06 14:22:33 -08:00
parent e7b6ef8f50
commit b75a113e39

View file

@@ -1460,6 +1460,10 @@ def get_optional_params( # use the openai defaults
_check_valid_arg(supported_params=supported_params)
# temperature, top_p, n, stream, stop, max_tokens, n, presence_penalty default to None
if temperature is not None:
if temperature == 0.0 or temperature == 0:
# hugging face exception raised when temp==0
# Failed: Error occurred: HuggingfaceException - Input validation error: `temperature` must be strictly positive
temperature = 0.0001
optional_params["temperature"] = temperature
if top_p is not None:
    optional_params["top_p"] = top_p
@@ -1471,6 +1475,10 @@ def get_optional_params( # use the openai defaults
if stop is not None:
    optional_params["stop"] = stop
if max_tokens is not None:
# HF TGI raises the following exception when max_new_tokens==0
# Failed: Error occurred: HuggingfaceException - Input validation error: `max_new_tokens` must be strictly positive
if max_tokens == 0:
max_tokens = 1
optional_params["max_new_tokens"] = max_tokens
if n is not None:
    optional_params["best_of"] = n