Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-31 13:13:54 +00:00)
feat: Support "stop" parameter in remote:vLLM
Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>
This commit is contained in:
parent: f369871083
commit: a1da09e166

4 changed files with 34 additions and 0 deletions
@@ -201,6 +201,7 @@ class SamplingParams(BaseModel):
     max_tokens: Optional[int] = 0
     repetition_penalty: Optional[float] = 1.0
+    stop: Optional[List[str]] = None


 class CheckpointQuantizationFormat(Enum):
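For orientation, a minimal sketch of the model after this hunk. Only the three fields visible in the diff are included; the actual SamplingParams model in llama-stack has additional fields that are omitted here.

```python
from typing import List, Optional
from pydantic import BaseModel

class SamplingParams(BaseModel):
    # Fields visible in the hunk above; the real model defines more.
    max_tokens: Optional[int] = 0
    repetition_penalty: Optional[float] = 1.0
    stop: Optional[List[str]] = None  # new: strings at which generation should halt

# Callers can now request stop sequences alongside the existing sampling knobs.
params = SamplingParams(max_tokens=128, stop=["</answer>", "\n\n"])
```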
@@ -147,6 +147,9 @@ def get_sampling_options(params: SamplingParams) -> dict:
     if params.repetition_penalty is not None and params.repetition_penalty != 1.0:
         options["repeat_penalty"] = params.repetition_penalty

+    if params.stop is not None:
+        options["stop"] = params.stop
+
     return options
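A sketch of how the new field is forwarded by get_sampling_options. The `options` dict is initialized empty here only to keep the example self-contained; in the actual helper it is populated with other sampling options before the lines shown in the hunk.

```python
def get_sampling_options(params: SamplingParams) -> dict:
    options = {}  # the real helper fills in other options before this point

    if params.repetition_penalty is not None and params.repetition_penalty != 1.0:
        options["repeat_penalty"] = params.repetition_penalty

    # New in this commit: pass stop sequences through so the remote vLLM
    # provider can hand them to the engine's "stop" parameter.
    if params.stop is not None:
        options["stop"] = params.stop

    return options

# With the change, stop sequences survive the conversion:
assert get_sampling_options(SamplingParams(stop=["###"]))["stop"] == ["###"]
```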