{ "test_id": null, "request": { "method": "POST", "url": "http://0.0.0.0:11434/v1/v1/chat/completions", "headers": {}, "body": { "model": "llama3.2:3b-instruct-fp16", "messages": [ { "role": "user", "content": "Test trace 0" } ], "extra_body": {} }, "endpoint": "/v1/chat/completions", "model": "llama3.2:3b-instruct-fp16" }, "response": { "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { "id": "rec-8d2ccc0a7de7", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { "content": "I'm not able to test a \"trace 0\" as I don't have any information about what that refers to. Can you please provide more context or clarify what you mean by \"test trace 0\"? Is this related to programming, security testing, or something else? I'll do my best to help once I understand the situation.", "refusal": null, "role": "assistant", "annotations": null, "audio": null, "function_call": null, "tool_calls": null } } ], "created": 0, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { "completion_tokens": 70, "prompt_tokens": 29, "total_tokens": 99, "completion_tokens_details": null, "prompt_tokens_details": null } } }, "is_streaming": false } }