support stream_options for chat completion models

Ishaan Jaff 2024-05-08 21:52:25 -07:00
parent edb10198ef
commit 1042051602


@@ -530,6 +530,7 @@ class OpenAIChatCompletion(BaseLLM):
 model=model,
 custom_llm_provider="openai",
 logging_obj=logging_obj,
+stream_options=data.get("stream_options", None),
 )
 return streamwrapper
@@ -579,6 +580,7 @@ class OpenAIChatCompletion(BaseLLM):
 model=model,
 custom_llm_provider="openai",
 logging_obj=logging_obj,
+stream_options=data.get("stream_options", None),
 )
 return streamwrapper
 except (
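
For context, a minimal sketch of how a caller might exercise this change once stream_options is forwarded to the streaming wrapper as in the diff above. This is illustrative, not part of the commit: the model name, the include_usage option, and the environment-variable API key handling are assumptions.

# Illustrative usage sketch (not part of this commit).
# Assumes OPENAI_API_KEY is set in the environment and the named model is available.
import litellm

response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
    stream=True,
    # Forwarded through data["stream_options"] to the stream wrapper per this commit;
    # with include_usage, OpenAI appends a final chunk carrying token usage.
    stream_options={"include_usage": True},
)

for chunk in response:
    # Regular chunks carry content deltas; the last chunk carries usage when requested.
    print(chunk)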