forked from phoenix/litellm-mirror
(feat) ollama_chat - add async streaming
This commit is contained in:
parent 67ed1ee10e
commit aea7faa2c1
1 changed file with 1 addition and 0 deletions
@@ -7198,6 +7198,7 @@ class CustomStreamWrapper:
                 or self.custom_llm_provider == "text-completion-openai"
                 or self.custom_llm_provider == "huggingface"
                 or self.custom_llm_provider == "ollama"
+                or self.custom_llm_provider == "ollama_chat"
                 or self.custom_llm_provider == "vertex_ai"
             ):
                 print_verbose(f"INSIDE ASYNC STREAMING!!!")
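For context, the added branch is what routes ollama_chat streams through the async-streaming path. Below is a minimal usage sketch, not part of this diff, assuming litellm's async entry point acompletion with stream=True returning a CustomStreamWrapper; the model name "ollama_chat/llama2" and the prompt are illustrative placeholders.

# Minimal sketch (assumption, not part of this commit): exercising the new
# async-streaming branch for ollama_chat through litellm's acompletion.
import asyncio
import litellm

async def main():
    # "ollama_chat/llama2" is a placeholder model name that routes to the
    # ollama_chat provider; any locally served Ollama chat model would do.
    response = await litellm.acompletion(
        model="ollama_chat/llama2",
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,  # streaming responses are wrapped in CustomStreamWrapper
    )
    # With this commit, "ollama_chat" is included in the async-streaming check,
    # so the stream can be consumed with `async for`.
    async for chunk in response:
        print(chunk)

asyncio.run(main())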