From d1db67890c7f70b4bd94269e4ffd59d715052783 Mon Sep 17 00:00:00 2001
From: Krrish Dholakia
Date: Tue, 6 Feb 2024 10:11:43 -0800
Subject: [PATCH] fix(ollama.py): support format for ollama

---
 litellm/llms/ollama.py      | 10 +++++++++-
 litellm/llms/ollama_chat.py |  3 +++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/litellm/llms/ollama.py b/litellm/llms/ollama.py
index d0bc24af4c..9339deb78d 100644
--- a/litellm/llms/ollama.py
+++ b/litellm/llms/ollama.py
@@ -146,7 +146,15 @@ def get_ollama_response(
             optional_params[k] = v
 
     stream = optional_params.pop("stream", False)
-    data = {"model": model, "prompt": prompt, "options": optional_params}
+    format = optional_params.pop("format", None)
+    data = {
+        "model": model,
+        "prompt": prompt,
+        "options": optional_params,
+        "stream": stream,
+    }
+    if format is not None:
+        data["format"] = format
 
     ## LOGGING
     logging_obj.pre_call(
diff --git a/litellm/llms/ollama_chat.py b/litellm/llms/ollama_chat.py
index d1a439398b..0311931b13 100644
--- a/litellm/llms/ollama_chat.py
+++ b/litellm/llms/ollama_chat.py
@@ -146,12 +146,15 @@ def get_ollama_response(
             optional_params[k] = v
 
     stream = optional_params.pop("stream", False)
+    format = optional_params.pop("format", None)
     data = {
         "model": model,
         "messages": messages,
         "options": optional_params,
         "stream": stream,
     }
+    if format is not None:
+        data["format"] = format
     ## LOGGING
     logging_obj.pre_call(
         input=None,
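
Note: a minimal usage sketch of what this patch enables, assuming a local
Ollama server with a model pulled (the model name below is illustrative).
Ollama's API accepts "format" as a top-level field on the request body
rather than inside "options", which is why the patch pops it out of
optional_params before building the payload.

    import litellm

    # Ask Ollama to constrain its output to valid JSON. With this patch,
    # the `format` kwarg is popped from optional_params and sent as a
    # top-level field to Ollama instead of being buried in "options".
    response = litellm.completion(
        model="ollama/llama2",  # illustrative; any locally pulled model works
        messages=[{"role": "user", "content": "List three primary colors as JSON."}],
        format="json",
    )
    print(response.choices[0].message.content)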