IDs are now deterministic hashes based on request content, and timestamps are normalized to constants, eliminating spurious changes when re-recording tests.

## Changes

- Updated `inference_recorder.py` to normalize IDs and timestamps during recording (see the sketch below)
- Added `scripts/normalize_recordings.py` utility to re-normalize existing recordings
- Created documentation in `tests/integration/recordings/README.md`
- Normalized 350 existing recording files
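The normalization code itself is not reproduced on this page; the following is a minimal sketch of the idea, assuming the recorder hashes the canonicalized request body and pins timestamps to the Unix epoch. The function names are illustrative, not the actual `inference_recorder.py` API.

```python
# Illustrative sketch only -- not the actual inference_recorder.py implementation.
import hashlib
import json


def deterministic_id(request_body: dict, prefix: str = "rec") -> str:
    """Derive a stable ID from the request content instead of a random value."""
    canonical = json.dumps(request_body, sort_keys=True, separators=(",", ":"))
    digest = hashlib.sha256(canonical.encode("utf-8")).hexdigest()[:16]
    return f"{prefix}-{digest}"


def normalize_timestamp(_raw: str) -> str:
    """Pin volatile timestamps to a constant so re-recording produces no diff."""
    return "1970-01-01T00:00:00.000000Z"
```

With a scheme like this, the same request always maps to the same recording ID, and volatile fields such as `created_at` in the recording below collapse to a constant.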
		
			
				
	
	
		
{
  "request": {
    "method": "POST",
    "url": "http://localhost:11434/api/generate",
    "headers": {},
    "body": {
      "model": "llama3.2:3b-instruct-fp16",
      "raw": true,
      "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is 2 + 2?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
      "options": {
        "temperature": 0.0
      },
      "stream": true
    },
    "endpoint": "/api/generate",
    "model": "llama3.2:3b-instruct-fp16"
  },
  "response": {
    "body": [
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": "The",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": " answer",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": " to",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": " ",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": "2",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": " +",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": " ",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": "2",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": " is",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": " ",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": "4",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": false,
          "done_reason": null,
          "total_duration": null,
          "load_duration": null,
          "prompt_eval_count": null,
          "prompt_eval_duration": null,
          "eval_count": null,
          "eval_duration": null,
          "response": ".",
          "thinking": null,
          "context": null
        }
      },
      {
        "__type__": "ollama._types.GenerateResponse",
        "__data__": {
          "model": "llama3.2:3b-instruct-fp16",
          "created_at": "1970-01-01T00:00:00.000000Z",
          "done": true,
          "done_reason": "stop",
          "total_duration": 0,
          "load_duration": 0,
          "prompt_eval_count": 29,
          "prompt_eval_duration": 0,
          "eval_count": 13,
          "eval_duration": 0,
          "response": "",
          "thinking": null,
          "context": null
        }
      }
    ],
    "is_streaming": true
  }
}
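For reference, replaying a streamed recording like the one above amounts to concatenating the `response` fragments of its chunks. A minimal sketch, assuming the recording is loaded straight from its JSON file; the helper name is hypothetical and not part of the test harness.

```python
# Hypothetical replay helper -- illustration only, not the project's actual code.
import json
from pathlib import Path


def replay_streamed_text(recording_path: Path) -> str:
    """Concatenate the 'response' fragments of a recorded streaming generate call."""
    record = json.loads(recording_path.read_text())
    chunks = record["response"]["body"]
    return "".join(chunk["__data__"]["response"] for chunk in chunks)


# For the recording above, this yields "The answer to 2 + 2 is 4."
```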