(feat) add CustomStreamWrapper for Ollama - match OpenAI streaming
parent 897286ec15
commit 689acb8a08
1 changed file with 4 additions and 1 deletion
@@ -1033,7 +1033,10 @@ def completion(
         generator = ollama.get_ollama_response_stream(api_base, model, prompt, optional_params)
         if optional_params.get("stream", False) == True:
             # assume all ollama responses are streamed
-            return generator
+            response = CustomStreamWrapper(
+                generator, model, custom_llm_provider="ollama", logging_obj=logging
+            )
+            return response
         else:
             response_string = ""
             for chunk in generator:
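With this change, a caller can iterate the Ollama stream the same way it would iterate an OpenAI streaming response. Below is a minimal sketch of the consuming side, assuming the "ollama/" model prefix routes into the branch patched above and that CustomStreamWrapper yields OpenAI-style delta chunks; the model name, prompt, and api_base are illustrative only, not taken from this commit:

    import litellm

    # Hypothetical call exercising the ollama branch above;
    # stream=True is what makes completion() return the CustomStreamWrapper.
    response = litellm.completion(
        model="ollama/llama2",              # illustrative model name
        messages=[{"role": "user", "content": "Hello!"}],
        api_base="http://localhost:11434",  # assumed local Ollama endpoint
        stream=True,
    )

    # Iterate exactly as with an OpenAI streaming response: each chunk is
    # assumed to carry an OpenAI-style delta holding the next text piece.
    for chunk in response:
        delta = chunk["choices"][0]["delta"]
        content = delta.get("content")
        if content:
            print(content, end="", flush=True)

Without stream=True, the else branch still drains the generator into response_string and returns a single non-streamed response, so existing callers are unaffected.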