forked from phoenix/litellm-mirror
support stream_options for chat completion models
parent edb10198ef
commit 1042051602
1 changed file with 2 additions and 0 deletions
@@ -530,6 +530,7 @@ class OpenAIChatCompletion(BaseLLM):
                 model=model,
                 custom_llm_provider="openai",
                 logging_obj=logging_obj,
+                stream_options=data.get("stream_options", None),
             )
             return streamwrapper
@@ -579,6 +580,7 @@ class OpenAIChatCompletion(BaseLLM):
                 model=model,
                 custom_llm_provider="openai",
                 logging_obj=logging_obj,
+                stream_options=data.get("stream_options", None),
             )
             return streamwrapper
         except (
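
Both hunks forward a caller-supplied stream_options value through to the stream wrapper, so streaming chat completions can pass provider options such as OpenAI's include_usage flag. A minimal usage sketch of the caller side (the model name and prompt are placeholders, and it assumes the litellm package is installed and an OpenAI API key is configured):

    import litellm

    # Request a streaming chat completion; stream_options is forwarded to
    # the provider. With OpenAI, {"include_usage": True} asks for a final
    # chunk carrying token-usage statistics for the whole stream.
    response = litellm.completion(
        model="gpt-3.5-turbo",  # placeholder model name
        messages=[{"role": "user", "content": "Say hello."}],
        stream=True,
        stream_options={"include_usage": True},
    )

    for chunk in response:
        print(chunk)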