Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-26 03:04:13 +00:00
fix petals bug
commit 8e2b139f8d (parent 361bf02e53)
2 changed files with 3 additions and 1 deletion
@@ -1126,6 +1126,8 @@ def get_optional_params(  # use the openai defaults
         # max_new_tokens=1,temperature=0.9, top_p=0.6
         if max_tokens != float("inf"):
             optional_params["max_new_tokens"] = max_tokens
+        else:
+            optional_params["max_new_tokens"] = 256 # petals always needs max_new_tokens
         if temperature != 1:
             optional_params["temperature"] = temperature
         if top_p != 1:
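For readers skimming the diff: the fix makes sure Petals requests always carry max_new_tokens, falling back to 256 when the caller never set max_tokens (which litellm represents as float("inf")). The sketch below is a hypothetical, self-contained stand-in for the patched Petals branch of get_optional_params, not the real litellm function or its signature; the final top_p assignment is assumed, since the hunk above is cut off at that point.

# Hypothetical stand-in for the patched Petals branch of get_optional_params
# (names and defaults taken from the diff above, not the real litellm API).
def petals_optional_params(max_tokens=float("inf"), temperature=1, top_p=1):
    optional_params = {}
    if max_tokens != float("inf"):
        optional_params["max_new_tokens"] = max_tokens
    else:
        optional_params["max_new_tokens"] = 256  # petals always needs max_new_tokens
    if temperature != 1:
        optional_params["temperature"] = temperature
    if top_p != 1:
        optional_params["top_p"] = top_p  # assumed continuation of the truncated hunk
    return optional_params

print(petals_optional_params())                # {'max_new_tokens': 256}
print(petals_optional_params(max_tokens=100))  # {'max_new_tokens': 100}

Before this change, the first call above would have produced an empty dict, leaving Petals without the max_new_tokens it requires; presumably that is the bug the commit title refers to.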
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "litellm"
-version = "0.1.788"
+version = "0.1.789"
 description = "Library to easily interface with LLM API providers"
 authors = ["BerriAI"]
 license = "MIT License"