feat(utils.py): support async streaming for custom llm provider

This commit is contained in:
Krrish Dholakia 2024-07-25 17:11:57 -07:00
parent bf23aac11d
commit 9b1c7066b7
3 changed files with 38 additions and 2 deletions

View file

@@ -10132,6 +10132,7 @@ class CustomStreamWrapper:
try:
if self.completion_stream is None:
await self.fetch_stream()
if (
self.custom_llm_provider == "openai"
or self.custom_llm_provider == "azure"
@@ -10156,6 +10157,7 @@ class CustomStreamWrapper:
or self.custom_llm_provider == "triton"
or self.custom_llm_provider == "watsonx"
or self.custom_llm_provider in litellm.openai_compatible_endpoints
or self.custom_llm_provider in litellm._custom_providers
):
async for chunk in self.completion_stream:
print_verbose(f"value of async chunk: {chunk}")