(feat) add streaming for text_completion

This commit is contained in:
ishaan-jaff 2023-11-08 11:58:07 -08:00
parent a404b0fc3b
commit 2a751c277f
2 changed files with 41 additions and 0 deletions

View file

@@ -59,6 +59,7 @@ encoding = tiktoken.get_encoding("cl100k_base")
from litellm.utils import (
get_secret,
CustomStreamWrapper,
TextCompletionStreamWrapper,
ModelResponse,
TextCompletionResponse,
TextChoices,
@@ -2031,6 +2032,9 @@ def text_completion(
**kwargs,
**optional_params,
)
if stream == True or kwargs.get("stream", False) == True:
response = TextCompletionStreamWrapper(completion_stream=response, model=model)
return response
transformed_logprobs = None
# only supported for TGI models