mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-25 10:44:24 +00:00)
(feat) add streaming for text_completion
Parent: a404b0fc3b
Commit: 2a751c277f
2 changed files with 41 additions and 0 deletions
@@ -59,6 +59,7 @@ encoding = tiktoken.get_encoding("cl100k_base")
 from litellm.utils import (
     get_secret,
     CustomStreamWrapper,
+    TextCompletionStreamWrapper,
     ModelResponse,
     TextCompletionResponse,
     TextChoices,
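The first hunk imports TextCompletionStreamWrapper from litellm.utils, the class that adapts a chat-style completion stream into text-completion-shaped chunks. The wrapper itself is defined in litellm/utils.py and is not shown in this diff; the sketch below is only an illustration of the general shape such a wrapper could take, assuming the underlying stream yields dict-like chat chunks with choices[0]["delta"]["content"].

# Illustrative sketch only -- NOT the actual litellm.utils.TextCompletionStreamWrapper.
# It assumes the wrapped stream yields dict-like chat-completion chunks; the real
# class may use different field access and chunk types.
class TextCompletionStreamWrapperSketch:
    def __init__(self, completion_stream, model):
        self.completion_stream = completion_stream
        self.model = model

    def __iter__(self):
        return self

    def __next__(self):
        # Take the next chat-completion chunk and reshape it into the
        # text-completion format (choices[].text instead of choices[].delta).
        chunk = next(self.completion_stream)
        choice = chunk["choices"][0]
        return {
            "id": chunk.get("id"),
            "object": "text_completion",
            "model": self.model,
            "choices": [
                {
                    "index": 0,
                    "text": choice.get("delta", {}).get("content") or "",
                    "finish_reason": choice.get("finish_reason"),
                }
            ],
        }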
@@ -2031,6 +2032,9 @@ def text_completion(
         **kwargs,
         **optional_params,
     )
+    if stream == True or kwargs.get("stream", False) == True:
+        response = TextCompletionStreamWrapper(completion_stream=response, model=model)
+        return response
 
     transformed_logprobs = None
     # only supported for TGI models
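The second hunk adds the new branch in text_completion: when streaming is requested, the raw completion stream is wrapped in TextCompletionStreamWrapper and returned immediately instead of being collected into a single TextCompletionResponse. A minimal, hypothetical caller-side sketch of the new path (model name and prompt are placeholders):

import litellm

# Request a streamed text completion; with this commit, stream=True makes
# text_completion return a TextCompletionStreamWrapper instead of a full response.
response = litellm.text_completion(
    model="gpt-3.5-turbo-instruct",
    prompt="Say this is a test",
    stream=True,
)

# Iterate over the wrapper to receive text-completion-formatted chunks as they
# arrive (the exact chunk schema depends on the wrapper implementation).
for chunk in response:
    print(chunk)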