llama-stack-mirror/tests/integration/responses/recordings/07b6475c42139e6e3f06323eeef10cba663a61796f4667367aa5084f2e2c2eb6.json
Ashwin Bharambe f50ce11a3b
feat(tests): make inference_recorder into api_recorder (include tool_invoke) (#3403)
Renames `inference_recorder.py` to `api_recorder.py` and extends it to
support recording/replaying tool invocations in addition to inference
calls.

This allows us to record web-search and other tool calls and then replay
those recordings when running `tests/integration/responses`.
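
For intuition, here is a minimal sketch of the record/replay idea this enables. It is illustrative only: the real `api_recorder.py` differs in detail, and `RECORDINGS_DIR`, `request_key`, and `lookup_recording` are hypothetical names. The gist is that each outgoing request is normalized and hashed, and the hash names a JSON recording such as the one below.

```python
# Illustrative sketch only -- not the actual api_recorder implementation.
# Idea: normalize the request, hash it, and use the hash as the recording filename.
import hashlib
import json
from pathlib import Path

RECORDINGS_DIR = Path("tests/integration/responses/recordings")  # assumed location


def request_key(method: str, url: str, body: dict) -> str:
    """Derive a stable key from the request (real hashing details may differ)."""
    payload = json.dumps({"method": method, "url": url, "body": body}, sort_keys=True)
    return hashlib.sha256(payload.encode()).hexdigest()


def lookup_recording(method: str, url: str, body: dict) -> dict | None:
    """Return the recorded response if a matching recording exists, else None."""
    path = RECORDINGS_DIR / f"{request_key(method, url, body)}.json"
    if path.exists():
        return json.loads(path.read_text())["response"]
    # In record-if-missing mode, the live call would run here and its
    # response would be written to `path` for future replays.
    return None
```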

## Test Plan

```
export OPENAI_API_KEY=...
export TAVILY_SEARCH_API_KEY=...

./scripts/integration-tests.sh --stack-config ci-tests \
   --suite responses --inference-mode record-if-missing
```
2025-10-09 14:27:51 -07:00

{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "How many experts does the Llama 4 Maverick model have?"
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_aPe1vS1v5bIwPgl789D5bfmW",
"type": "function",
"function": {
"name": "knowledge_search",
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_aPe1vS1v5bIwPgl789D5bfmW",
"content": [
{
"type": "text",
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
},
{
"type": "text",
"text": "[1] document_id: file-690481987689, score: 2.5781234969335522, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-690481987689', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-690481987689|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
},
{
"type": "text",
"text": "END of knowledge_search tool results.\n"
},
{
"type": "text",
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
}
]
},
{
"role": "assistant",
"content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-690481987689|>."
},
{
"role": "user",
"content": "Can you tell me more about the architecture?"
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_UqyVZyvMh30eQuiKg7lJIUhQ",
"type": "function",
"function": {
"name": "knowledge_search",
"arguments": "{\"query\":\"Llama 4 Maverick model architecture\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_UqyVZyvMh30eQuiKg7lJIUhQ",
"content": [
{
"type": "text",
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
},
{
"type": "text",
"text": "[1] document_id: file-690481987689, score: 1.9327567816402336, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-690481987689', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-690481987689|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
},
{
"type": "text",
"text": "END of knowledge_search tool results.\n"
},
{
"type": "text",
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model architecture\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
}
]
}
],
"stream": true,
"tools": [
{
"type": "function",
"function": {
"name": "knowledge_search",
"description": "Search for information in a database.",
"parameters": {
"type": "object",
"properties": {
"query": {
"type": "string",
"description": "The query to search for. Can be a natural language sentence or keywords."
}
},
"required": [
"query"
]
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "MR85AdN9cL5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": "The Llama ",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "PU1MvlGgbp"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": "4 Maverick model's architecture utilizes a mixture of experts,",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "j5k5TwcdwAGF"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": " comprising ",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "RY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": "128 experts <|file-690481987689|>.",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "zfJzUoSQia"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": " Unfortunately,",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "myxQ8DZiq0bzUFn"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": " the retrieved data did not provide more detailed information beyond this specific aspect of the architecture.",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "tqzrfbJ9V"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": " Would you like me to search for additional details?",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "p8JOT5P"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-07b6475c4213",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f64f290af2",
"usage": null,
"obfuscation": "tA2QeAL"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}
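
Not part of the recording itself, but as a reading aid: the sketch below (assumed to run from the recordings directory, using this recording's filename) reassembles the assistant's full reply by concatenating the `delta.content` fields of the recorded streaming chunks.

```python
# Reading aid (not part of the recording): reconstruct the assistant's reply
# from the recorded streaming chunks. Assumes it runs from the recordings directory.
import json

with open("07b6475c42139e6e3f06323eeef10cba663a61796f4667367aa5084f2e2c2eb6.json") as f:
    recording = json.load(f)

parts = []
for chunk in recording["response"]["body"]:
    delta = chunk["__data__"]["choices"][0]["delta"]
    if delta.get("content"):  # the final chunk carries finish_reason with null content
        parts.append(delta["content"])

print("".join(parts))
# "The Llama 4 Maverick model's architecture utilizes a mixture of experts, ..."
```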