forked from phoenix/litellm-mirror
add stream_options to text_completion
parent dfd6361310
commit 4d5b4a5293
1 changed file with 6 additions and 1 deletion
@@ -3195,6 +3195,7 @@ def text_completion(
         Union[str, List[str]]
     ] = None,  # Optional: Sequences where the API will stop generating further tokens.
     stream: Optional[bool] = None,  # Optional: Whether to stream back partial progress.
+    stream_options: Optional[dict] = None,
     suffix: Optional[
         str
     ] = None,  # Optional: The suffix that comes after a completion of inserted text.
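The new keyword is a straight pass-through on the public API. A minimal usage sketch, not part of this commit, assuming an OpenAI-style provider that honors stream_options (the model name and the include_usage key are illustrative):

    # Hypothetical usage sketch; model name and option key are assumptions.
    import litellm

    stream = litellm.text_completion(
        model="gpt-3.5-turbo-instruct",
        prompt="Say hello",
        stream=True,
        stream_options={"include_usage": True},  # forwarded via the new parameter
    )
    for chunk in stream:
        print(chunk)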
@@ -3272,6 +3273,8 @@ def text_completion(
         optional_params["stop"] = stop
     if stream is not None:
         optional_params["stream"] = stream
+    if stream_options is not None:
+        optional_params["stream_options"] = stream_options
     if suffix is not None:
         optional_params["suffix"] = suffix
     if temperature is not None:
@@ -3382,7 +3385,9 @@ def text_completion(
     if kwargs.get("acompletion", False) == True:
         return response
     if stream == True or kwargs.get("stream", False) == True:
-        response = TextCompletionStreamWrapper(completion_stream=response, model=model)
+        response = TextCompletionStreamWrapper(
+            completion_stream=response, model=model, stream_options=stream_options
+        )
         return response
     transformed_logprobs = None
     # only supported for TGI models
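Why the wrapper needs the value: with OpenAI-style streaming, stream_options={"include_usage": True} asks the provider to append a final chunk carrying token usage, so the stream wrapper has to know the option was set in order to surface that chunk. A generic sketch of the idea, assumed for illustration and not taken from the litellm source:

    # Illustration only; TextCompletionStreamWrapper's actual logic lives in litellm.
    from typing import Iterator, Optional

    class StreamWrapperSketch:
        def __init__(
            self,
            completion_stream: Iterator[dict],
            model: str,
            stream_options: Optional[dict] = None,
        ):
            self.completion_stream = completion_stream
            self.model = model
            # Kept so iteration knows whether a trailing usage-only chunk is expected.
            self.stream_options = stream_options

        def __iter__(self):
            for chunk in self.completion_stream:
                # A real wrapper would reshape chat chunks into text-completion chunks here.
                yield chunk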