Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-25 18:54:30 +00:00.
add should_fake_stream
This commit is contained in:
parent
c58aff01e0
commit
bc174adcd0
3 changed files with 54 additions and 0 deletions
|
@ -1975,6 +1975,27 @@ def supports_system_messages(model: str, custom_llm_provider: Optional[str]) ->
|
|||
)
|
||||
|
||||
|
||||
def supports_native_streaming(model: str, custom_llm_provider: Optional[str]) -> bool:
    """
    Determine whether the given model natively supports streaming responses.

    Parameters:
        model (str): Name of the model to look up.
        custom_llm_provider (str): Provider the model is served through.

    Returns:
        bool: True when the model streams natively, False otherwise.

    Raises:
        Exception: If the given model is not found in model_prices_and_context_window.json.
    """
    # Delegate to the shared capability-lookup helper; this function only
    # fixes the capability key being queried.
    capability_key = "supports_native_streaming"
    return _supports_factory(
        model=model,
        custom_llm_provider=custom_llm_provider,
        key=capability_key,
    )
|
||||
|
||||
|
||||
def supports_response_schema(
|
||||
model: str, custom_llm_provider: Optional[str] = None
|
||||
) -> bool:
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue