feat(groq/): add response_format support for groq

Closes https://github.com/BerriAI/litellm/issues/6845
This commit is contained in:
Krrish Dholakia 2024-11-21 18:13:37 +05:30
parent d7f9999cef
commit 42beb618ae
10 changed files with 275 additions and 82 deletions

View file

@@ -749,6 +749,7 @@ def test_convert_model_response_object():
("gemini/gemini-1.5-pro", True),
("predibase/llama3-8b-instruct", True),
("gpt-3.5-turbo", False),
("groq/llama3-70b-8192", True),
],
)
def test_supports_response_schema(model, expected_bool):