forked from phoenix/litellm-mirror
stream_options for text-completion-openai
parent
4d5b4a5293
commit
66053f14ae
1 changed file with 2 additions and 0 deletions
@@ -1205,6 +1205,7 @@ class OpenAITextCompletion(BaseLLM):
                 model=model,
                 custom_llm_provider="text-completion-openai",
                 logging_obj=logging_obj,
+                stream_options=data.get("stream_options", None),
             )

             for chunk in streamwrapper:
@@ -1243,6 +1244,7 @@ class OpenAITextCompletion(BaseLLM):
                 model=model,
                 custom_llm_provider="text-completion-openai",
                 logging_obj=logging_obj,
+                stream_options=data.get("stream_options", None),
             )

             async for transformed_chunk in streamwrapper:
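For context, a minimal usage sketch of what this change enables: passing stream_options through litellm's text completion path so it reaches the stream wrapper. This is an assumption-laden illustration, not code from the commit; the model id, prompt, and the effect of include_usage on the final chunk are assumed here, not confirmed by this diff.

import litellm

# Sketch only: with this commit, stream_options given to text_completion
# should be forwarded to the text-completion-openai stream wrapper.
response = litellm.text_completion(
    model="text-completion-openai/gpt-3.5-turbo-instruct",  # assumed model id
    prompt="Say this is a test",
    stream=True,
    stream_options={"include_usage": True},  # request a usage block with the stream
)

for chunk in response:
    # Intermediate chunks carry streamed text; with include_usage the
    # provider is expected to append usage information at the end.
    print(chunk)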