{ "test_id": "tests/integration/inference/test_tools_with_schemas.py::TestEdgeCases::test_tool_without_schema[txt=ollama/llama3.2:3b-instruct-fp16]", "request": { "method": "POST", "url": "http://0.0.0.0:11434/v1/v1/chat/completions", "headers": {}, "body": { "model": "llama3.2:3b-instruct-fp16", "messages": [ { "role": "user", "content": "Call the no args tool" } ], "tools": [ { "type": "function", "function": { "name": "no_args_tool", "description": "Tool with no arguments", "parameters": { "type": "object", "properties": {} } } } ], "extra_body": null }, "endpoint": "/v1/chat/completions", "model": "llama3.2:3b-instruct-fp16" }, "response": { "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { "id": "rec-d34b62e97025", "choices": [ { "finish_reason": "tool_calls", "index": 0, "logprobs": null, "message": { "content": "", "refusal": null, "role": "assistant", "annotations": null, "audio": null, "function_call": null, "tool_calls": [ { "id": "call_qv9vgam1", "function": { "arguments": "{}", "name": "no_args_tool" }, "type": "function", "index": 0 } ] } } ], "created": 0, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { "completion_tokens": 14, "prompt_tokens": 148, "total_tokens": 162, "completion_tokens_details": null, "prompt_tokens_details": null } } }, "is_streaming": false } }