(feat) add support for echo for HF logprobs
parent 847de09308
commit ce462824be
2 changed files with 16 additions and 2 deletions
@@ -1348,6 +1348,10 @@ def get_optional_params( # use the openai defaults
             optional_params["best_of"] = n
         if presence_penalty:
             optional_params["repetition_penalty"] = presence_penalty
+        if "echo" in special_params:
+            # https://huggingface.co/docs/huggingface_hub/main/en/package_reference/inference_client#huggingface_hub.InferenceClient.text_generation.decoder_input_details
+            # Return the decoder input token logprobs and ids. You must set details=True as well for it to be taken into account. Defaults to False
+            optional_params["decoder_input_details"] = special_params["echo"]
     elif custom_llm_provider == "together_ai":
         ## check if unsupported param passed in
         supported_params = ["stream", "temperature", "max_tokens", "top_p", "stop", "frequency_penalty"]
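
For context on what the new flag buys: decoder_input_details asks a TGI-backed endpoint to return the ids and logprobs of the prompt tokens themselves, which is what OpenAI's echo parameter does. Below is a minimal sketch of the same request made directly through huggingface_hub; the model name and prompt are placeholders, not taken from this commit.

from huggingface_hub import InferenceClient

# Placeholder model; any text-generation-inference endpoint behaves the same way.
client = InferenceClient(model="bigcode/starcoder")

# Per the HF docs quoted in the diff, details=True must accompany
# decoder_input_details=True, or the prefill logprobs are not returned.
response = client.text_generation(
    "def hello_world():",
    max_new_tokens=10,
    details=True,
    decoder_input_details=True,
)

# details.prefill holds the echoed prompt tokens: id, text, and logprob
# for each decoder input token (the first token's logprob may be None).
for token in response.details.prefill:
    print(token.id, token.text, token.logprob)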
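
The hunk ends at the together_ai branch's supported-params list; the guard that consumes it sits outside the diff. A hypothetical sketch of such a check, assuming the caller's non-default params arrive as a dict; the helper name is illustrative, not litellm's actual code.

supported_params = ["stream", "temperature", "max_tokens", "top_p", "stop", "frequency_penalty"]

def check_valid_args(passed_params: dict, supported_params: list):
    # Flag any param the provider cannot honor instead of silently dropping it.
    unsupported = [k for k in passed_params if k not in supported_params]
    if unsupported:
        raise ValueError(f"together_ai does not support parameters: {unsupported}")

check_valid_args({"temperature": 0.7, "echo": True}, supported_params)
# raises: ValueError: together_ai does not support parameters: ['echo']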