fix petals bug

ishaan-jaff 2023-09-27 20:51:31 -07:00
parent 361bf02e53
commit 8e2b139f8d
2 changed files with 3 additions and 1 deletion


@@ -1126,6 +1126,8 @@ def get_optional_params(  # use the openai defaults
         # max_new_tokens=1,temperature=0.9, top_p=0.6
         if max_tokens != float("inf"):
             optional_params["max_new_tokens"] = max_tokens
+        else:
+            optional_params["max_new_tokens"] = 256  # petals always needs max_new_tokens
         if temperature != 1:
             optional_params["temperature"] = temperature
         if top_p != 1:
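
For context, a minimal, self-contained sketch of how the petals parameter mapping behaves after this change. This is not the actual litellm source; the helper name _petals_optional_params and its default argument values are assumptions made only to illustrate the branch added above.

# Sketch of the petals branch of get_optional_params after this fix
# (helper name and defaults are assumed for illustration only).
def _petals_optional_params(max_tokens=float("inf"), temperature=1, top_p=1):
    optional_params = {}
    if max_tokens != float("inf"):
        optional_params["max_new_tokens"] = max_tokens
    else:
        # petals always needs max_new_tokens, so fall back to 256
        optional_params["max_new_tokens"] = 256
    if temperature != 1:
        optional_params["temperature"] = temperature
    if top_p != 1:
        optional_params["top_p"] = top_p
    return optional_params

# Before this commit the call below returned {}, leaving petals without a
# token limit; with the fallback it returns {"max_new_tokens": 256}.
print(_petals_optional_params())

The second hunk below is the accompanying package version bump.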


@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.788"
+version = "0.1.789"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"