llama-stack-mirror/tests/integration/inference/recordings/f3bbb5229763f689be0dd350b96e1067861fdaa62a976a1a8f74ae18c1bb5bcb.json
Ashwin Bharambe 79bed44b04
fix(tests): ensure test isolation in server mode (#3737)
Propagate test IDs from client to server via HTTP headers to maintain
proper test isolation when running with server-based stack configs.
Without this, recorded/replayed inference requests in server mode would
leak across tests.

Changes:
- Patch client _prepare_request to inject the test ID into the provider
  data header (see the sketch below)
- Sync test context from provider data on the server side before storage
  operations
- Set the LLAMA_STACK_TEST_STACK_CONFIG_TYPE env var based on the stack
  config
- Configure console width for cleaner log output in CI
- Add a SQLITE_STORE_DIR temp directory for test data isolation
2025-10-08 12:03:36 -07:00
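
A minimal sketch of the client-side piece, for illustration only: the
header name X-LlamaStack-Provider-Data and the __test_id key are
assumptions based on llama-stack conventions, not code from this commit;
PYTEST_CURRENT_TEST is the standard environment variable pytest sets for
the running test.

    import json
    import os

    PROVIDER_DATA_HEADER = "X-LlamaStack-Provider-Data"  # assumed header name

    def with_test_id(original_prepare_request):
        """Wrap a client's _prepare_request so every outgoing request
        carries the current test ID inside the provider-data header."""

        def _prepare_request(self, request):
            test_id = os.environ.get("PYTEST_CURRENT_TEST")
            if test_id:
                data = json.loads(request.headers.get(PROVIDER_DATA_HEADER) or "{}")
                data["__test_id"] = test_id  # hypothetical key
                request.headers[PROVIDER_DATA_HEADER] = json.dumps(data)
            return original_prepare_request(self, request)

        return _prepare_request

    # e.g. in conftest.py:
    #   LlamaStackClient._prepare_request = with_test_id(LlamaStackClient._prepare_request)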


{
  "test_id": "tests/integration/inference/test_tools_with_schemas.py::TestMCPToolsInChatCompletion::test_mcp_tools_in_inference[txt=ollama/llama3.2:3b-instruct-fp16]",
  "request": {
    "method": "POST",
    "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "llama3.2:3b-instruct-fp16",
      "messages": [
        {
          "role": "user",
          "content": "Calculate 5 + 3"
        }
      ],
      "tools": [
        {
          "type": "function",
          "function": {
            "name": "calculate",
            "description": "",
            "parameters": {
              "properties": {
                "x": {
                  "title": "X",
                  "type": "number"
                },
                "y": {
                  "title": "Y",
                  "type": "number"
                },
                "operation": {
                  "title": "Operation",
                  "type": "string"
                }
              },
              "required": [
                "x",
                "y",
                "operation"
              ],
              "title": "calculateArguments",
              "type": "object"
            }
          }
        }
      ]
    },
    "endpoint": "/v1/chat/completions",
    "model": "llama3.2:3b-instruct-fp16"
  },
  "response": {
    "body": {
      "__type__": "openai.types.chat.chat_completion.ChatCompletion",
      "__data__": {
        "id": "rec-f3bbb5229763",
        "choices": [
          {
            "finish_reason": "tool_calls",
            "index": 0,
            "logprobs": null,
            "message": {
              "content": "",
              "refusal": null,
              "role": "assistant",
              "annotations": null,
              "audio": null,
              "function_call": null,
              "tool_calls": [
                {
                  "id": "call_4nzxw162",
                  "function": {
                    "arguments": "{\"operation\":\"+\",\"x\":\"5\",\"y\":\"3\"}",
                    "name": "calculate"
                  },
                  "type": "function",
                  "index": 0
                }
              ]
            }
          }
        ],
        "created": 0,
        "model": "llama3.2:3b-instruct-fp16",
        "object": "chat.completion",
        "service_tier": null,
        "system_fingerprint": "fp_ollama",
        "usage": {
          "completion_tokens": 27,
          "prompt_tokens": 172,
          "total_tokens": 199,
          "completion_tokens_details": null,
          "prompt_tokens_details": null
        }
      }
    },
    "is_streaming": false
  }
}
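
For context on how a fixture like this is found at replay time: the
filename hash and the synthetic "rec-f3bbb5229763" response id suggest
recordings are keyed by a digest of the recorded request. A minimal
sketch, assuming the digest covers the test ID, endpoint, and body (the
exact fields hashed are an assumption; only the filename/"rec-..."
correspondence above is taken from the recording itself):

    import hashlib
    import json

    def recording_key(test_id: str, endpoint: str, body: dict) -> str:
        # Serialize deterministically so the same request always maps to
        # the same recording file; including test_id keeps recordings
        # isolated per test, which is what this commit enforces in
        # server mode.
        payload = json.dumps(
            {"test_id": test_id, "endpoint": endpoint, "body": body},
            sort_keys=True,
        )
        return hashlib.sha256(payload.encode()).hexdigest()

    # The replayed response would then live at recordings/<key>.json and
    # be stamped with an id like "rec-" + key[:12].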