IDs are now deterministic hashes based on request content, and timestamps are normalized to constants, eliminating spurious changes when re-recording tests.

## Changes

- Updated `inference_recorder.py` to normalize IDs and timestamps during recording
- Added `scripts/normalize_recordings.py` utility to re-normalize existing recordings
- Created documentation in `tests/integration/recordings/README.md`
- Normalized 350 existing recording files
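As a rough illustration of the idea (not the actual `inference_recorder.py` code), the normalization step might look like the sketch below. The helper name, the choice of SHA-256, and the 12-character truncation are assumptions inferred from the ID format visible in the recording (`rec-bf79a89cc37f`) and its constant `"created": 0`.

```python
import hashlib
import json

def normalize_response(request_body: dict, response_data: dict) -> dict:
    """Replace nondeterministic response fields with stable values.

    Sketch only: the hash input and truncation length are assumptions,
    chosen to match the "rec-" + 12-hex-char IDs seen in the recordings.
    """
    # Hash the canonicalized request so the same request always yields
    # the same ID across re-recordings.
    canonical = json.dumps(request_body, sort_keys=True).encode()
    response_data["id"] = "rec-" + hashlib.sha256(canonical).hexdigest()[:12]
    # Pin the wall-clock timestamp to a constant.
    response_data["created"] = 0
    return response_data
```

The recording below shows the normalized output in practice: a deterministic `"id"` derived from the request, and `"created": 0`.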
```json
{
  "request": {
    "method": "POST",
    "url": "http://localhost:11434/v1/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "llama3.2:3b",
      "messages": [
        {
          "role": "user",
          "content": "OpenAI test 3"
        }
      ],
      "stream": false
    },
    "endpoint": "/v1/chat/completions",
    "model": "llama3.2:3b"
  },
  "response": {
    "body": {
      "__type__": "openai.types.chat.chat_completion.ChatCompletion",
      "__data__": {
        "id": "rec-bf79a89cc37f",
        "choices": [
          {
            "finish_reason": "stop",
            "index": 0,
            "logprobs": null,
            "message": {
              "content": "I'm happy to help, but it seems you want me to engage in a basic conversation as OpenAI's new chat model, right? I can do that!\n\nHere's my response:\n\nHello! How are you today? Is there something specific on your mind that you'd like to talk about or any particular topic you'd like to explore together?\n\nWhat is it that you're curious about?",
              "refusal": null,
              "role": "assistant",
              "annotations": null,
              "audio": null,
              "function_call": null,
              "tool_calls": null
            }
          }
        ],
        "created": 0,
        "model": "llama3.2:3b",
        "object": "chat.completion",
        "service_tier": null,
        "system_fingerprint": "fp_ollama",
        "usage": {
          "completion_tokens": 80,
          "prompt_tokens": 30,
          "total_tokens": 110,
          "completion_tokens_details": null,
          "prompt_tokens_details": null
        }
      }
    },
    "is_streaming": false
  }
}
```
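For recordings captured before this change, the commit adds `scripts/normalize_recordings.py`. A minimal sketch of what such a re-normalization pass could look like, assuming the request/response layout shown above (this is a guess at the utility's behavior, not its actual code):

```python
import hashlib
import json
import pathlib

def renormalize(path: pathlib.Path) -> None:
    """Rewrite one recording in place with a deterministic ID and a
    constant timestamp. Assumes the layout shown above; not the actual
    scripts/normalize_recordings.py implementation."""
    recording = json.loads(path.read_text())
    data = recording["response"]["body"].get("__data__")
    if not isinstance(data, dict):
        return
    canonical = json.dumps(recording["request"]["body"], sort_keys=True).encode()
    data["id"] = "rec-" + hashlib.sha256(canonical).hexdigest()[:12]
    data["created"] = 0
    path.write_text(json.dumps(recording, indent=2) + "\n")

# Walk the recordings directory (path assumed) and normalize every file.
for recording_path in pathlib.Path("tests/integration/recordings").rglob("*.json"):
    renormalize(recording_path)
```

Because the ID is a pure function of the request body, re-running either the recorder or this script over the same requests leaves the files byte-for-byte unchanged, which is what eliminates the spurious diffs when re-recording tests.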