(feat) add support for `echo` in HF logprobs

This commit is contained in:
ishaan-jaff 2023-10-31 18:15:17 -07:00
parent 847de09308
commit ce462824be
2 changed files with 16 additions and 2 deletions

View file

@ -1348,6 +1348,10 @@ def get_optional_params( # use the openai defaults
optional_params["best_of"] = n
if presence_penalty:
optional_params["repetition_penalty"] = presence_penalty
if "echo" in special_params:
# https://huggingface.co/docs/huggingface_hub/main/en/package_reference/inference_client#huggingface_hub.InferenceClient.text_generation.decoder_input_details
# Return the decoder input token logprobs and ids. You must set details=True as well for it to be taken into account. Defaults to False
optional_params["decoder_input_details"] = special_params["echo"]
elif custom_llm_provider == "together_ai":
## check if unsupported param passed in
supported_params = ["stream", "temperature", "max_tokens", "top_p", "stop", "frequency_penalty"]