(fix) using Anthropic response_format={"type": "json_object"} (#6721)

* add support for response_format=json anthropic

* add test_json_response_format to baseLLM ChatTest

* fix test_litellm_anthropic_prompt_caching_tools

* fix test_anthropic_function_call_with_no_schema

* add test_create_json_tool_call_for_response_format
This commit is contained in:
Ishaan Jaff 2024-11-12 19:06:00 -08:00 committed by GitHub
parent e7543378b8
commit 6d4cf2d908
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
4 changed files with 105 additions and 17 deletions

View file

@ -53,6 +53,32 @@ class BaseLLMChatTest(ABC):
response = litellm.completion(**base_completion_call_args, messages=messages)
assert response is not None
def test_json_response_format(self):
    """
    Test that the JSON response format is supported by the LLM API.

    Sends a completion request with response_format={"type": "json_object"}
    and verifies that the provider returns a response whose message content
    is valid, parseable JSON. Without the json.loads check the test would
    pass even if the provider ignored the response_format parameter.
    """
    import json  # local import: file's top-level import block is outside this diff

    base_completion_call_args = self.get_base_completion_call_args()
    litellm.set_verbose = True
    messages = [
        {
            "role": "system",
            "content": "Your output should be a JSON object with no additional properties. ",
        },
        {
            "role": "user",
            "content": "Respond with this in json. city=San Francisco, state=CA, weather=sunny, temp=60",
        },
    ]
    response = litellm.completion(
        **base_completion_call_args,
        messages=messages,
        response_format={"type": "json_object"},
    )
    print(response)

    # The provider must honor json_object mode: the returned content has to
    # exist and parse cleanly as JSON, otherwise the feature is broken.
    assert response is not None
    content = response.choices[0].message.content
    assert content is not None
    json.loads(content)  # raises ValueError/JSONDecodeError on invalid JSON
@pytest.fixture
def pdf_messages(self):
import base64