(feat) ollama_chat - add async streaming

This commit is contained in:
ishaan-jaff 2023-12-25 23:45:01 +05:30
parent 67ed1ee10e
commit aea7faa2c1


@@ -7198,6 +7198,7 @@ class CustomStreamWrapper:
             or self.custom_llm_provider == "text-completion-openai"
             or self.custom_llm_provider == "huggingface"
             or self.custom_llm_provider == "ollama"
+            or self.custom_llm_provider == "ollama_chat"
             or self.custom_llm_provider == "vertex_ai"
         ):
             print_verbose(f"INSIDE ASYNC STREAMING!!!")
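With "ollama_chat" added to the async-capable provider list, streamed responses from this provider can be consumed through litellm's async API. A minimal sketch (not part of this commit), assuming a locally running Ollama server and a pulled "llama2" model:

import asyncio
import litellm

async def main():
    # Request a streamed chat completion routed through the ollama_chat provider.
    # "ollama_chat/llama2" assumes an Ollama server on localhost with llama2 available.
    response = await litellm.acompletion(
        model="ollama_chat/llama2",
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
    )
    # The async iterator is the CustomStreamWrapper; each chunk carries a delta.
    async for chunk in response:
        content = chunk.choices[0].delta.content or ""
        print(content, end="", flush=True)

asyncio.run(main())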