(feat) add CustomStreamWrapper for Ollama to match OpenAI streaming format

This commit is contained in:
ishaan-jaff 2023-10-11 16:47:52 -07:00
parent 897286ec15
commit 689acb8a08

View file

@@ -1033,7 +1033,10 @@ def completion(
             generator = ollama.get_ollama_response_stream(api_base, model, prompt, optional_params)
             if optional_params.get("stream", False) == True:
                 # assume all ollama responses are streamed
-                return generator
+                response = CustomStreamWrapper(
+                    generator, model, custom_llm_provider="ollama", logging_obj=logging
+                )
+                return response
             else:
                 response_string = ""
                 for chunk in generator: