support stream_options for chat completion models

Ishaan Jaff 2024-05-08 21:52:25 -07:00
parent edb10198ef
commit 1042051602


@@ -530,6 +530,7 @@ class OpenAIChatCompletion(BaseLLM):
                 model=model,
                 custom_llm_provider="openai",
                 logging_obj=logging_obj,
+                stream_options=data.get("stream_options", None),
             )
             return streamwrapper
@@ -579,6 +580,7 @@ class OpenAIChatCompletion(BaseLLM):
                 model=model,
                 custom_llm_provider="openai",
                 logging_obj=logging_obj,
+                stream_options=data.get("stream_options", None),
             )
             return streamwrapper
         except (
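
The added parameter forwards the caller's stream_options payload into the streaming wrapper, so OpenAI stream options such as include_usage reach the underlying chat completion stream. A minimal usage sketch, assuming the litellm.completion entry point forwards stream_options unchanged; the model name and messages below are illustrative only:

import litellm

# Request a streamed chat completion and ask OpenAI to append a final
# chunk containing token usage via stream_options.
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Hello"}],
    stream=True,
    stream_options={"include_usage": True},
)

for chunk in response:
    # With include_usage set, the last streamed chunk is expected to
    # carry the usage totals for the request.
    print(chunk)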