From b87d630b0a9960f5efbde7d244451b69f8413552 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Mon, 25 Dec 2023 14:00:56 +0530
Subject: [PATCH] (test) ollama json mode

---
 litellm/tests/test_ollama.py | 12 +++++++++++-
 1 file changed, 11 insertions(+), 1 deletion(-)

diff --git a/litellm/tests/test_ollama.py b/litellm/tests/test_ollama.py
index 4a602fe64..a9f3f5468 100644
--- a/litellm/tests/test_ollama.py
+++ b/litellm/tests/test_ollama.py
@@ -34,4 +34,14 @@ def test_get_ollama_model():
     except Exception as e:
         pytest.fail(f"Error occurred: {e}")

-# test_get_ollama_model()
\ No newline at end of file
+# test_get_ollama_model()
+
+def test_ollama_json_mode():
+    # assert that format: "json" is passed through unchanged to ollama
+    try:
+        converted_params = get_optional_params(custom_llm_provider="ollama", model="llama2", format="json", temperature=0.5)
+        print("Converted params", converted_params)
+        assert converted_params == {'temperature': 0.5, 'format': 'json'}, f"{converted_params} != {{'temperature': 0.5, 'format': 'json'}}"
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+# test_ollama_json_mode()
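
For reference, the behavior the new test pins down boils down to the call below: get_optional_params, when routed through the ollama provider, should pass the format: "json" flag through unchanged alongside the other supported params. This is a minimal standalone sketch, not part of the patch; the "from litellm.utils import get_optional_params" import path is an assumption, since the diff does not show the test file's imports.

# Minimal sketch of what test_ollama_json_mode exercises.
# Assumption: get_optional_params is importable from litellm.utils
# (the import is not visible in the diff above).
from litellm.utils import get_optional_params

converted_params = get_optional_params(
    custom_llm_provider="ollama",  # route through the ollama provider mapping
    model="llama2",
    format="json",                 # ollama's JSON mode flag
    temperature=0.5,
)

# Expected per the test: format is passed through as-is next to temperature
print(converted_params)  # {'temperature': 0.5, 'format': 'json'}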