chore(tests): normalize recording IDs and timestamps to reduce git diff noise (#3676)

IDs are now deterministic hashes based on request content, and
timestamps are normalized to constants, eliminating spurious changes
when re-recording tests.
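
A minimal sketch of the idea, with illustrative names (`deterministic_id` and `normalize_response` are not necessarily what `inference_recorder.py` uses):

```python
import hashlib
import json

# Constant stand-in for real timestamps (assumption: epoch string used by the recorder).
EPOCH = "1970-01-01T00:00:00.000000Z"

def deterministic_id(request: dict, prefix: str = "rec") -> str:
    """Hash the canonicalized request so identical requests always get the same ID."""
    canonical = json.dumps(request, sort_keys=True, separators=(",", ":"))
    return f"{prefix}-{hashlib.sha256(canonical.encode()).hexdigest()[:24]}"

def normalize_response(response: dict) -> dict:
    """Pin volatile fields (timestamps, durations) to constants before writing the recording."""
    out = dict(response)
    if "created_at" in out:
        out["created_at"] = EPOCH
    for field in ("total_duration", "load_duration", "prompt_eval_duration", "eval_duration"):
        if out.get(field) is not None:
            out[field] = 0
    return out
```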

## Changes
- Updated `inference_recorder.py` to normalize IDs and timestamps during
recording
- Added `scripts/normalize_recordings.py` utility to re-normalize
existing recordings (a sketch follows this list)
- Created documentation in `tests/integration/recordings/README.md`
- Normalized 350 existing recording files
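
A rough sketch of what such a re-normalization pass could look like; the recursive walk, the field list, and the default path are assumptions for illustration rather than the actual contents of `scripts/normalize_recordings.py`:

```python
#!/usr/bin/env python3
"""Rewrite existing recording files so volatile fields match the recorder's constants."""
import json
import sys
from pathlib import Path

EPOCH = "1970-01-01T00:00:00.000000Z"
DURATION_FIELDS = {"total_duration", "load_duration", "prompt_eval_duration", "eval_duration"}

def normalize(node):
    """Recursively replace timestamps and durations anywhere in the recording structure."""
    if isinstance(node, dict):
        for key, value in node.items():
            if key == "created_at" and isinstance(value, str):
                node[key] = EPOCH
            elif key in DURATION_FIELDS and isinstance(value, (int, float)):
                node[key] = 0
            else:
                normalize(value)
    elif isinstance(node, list):
        for item in node:
            normalize(item)
    return node

def main(root: str) -> None:
    for path in sorted(Path(root).rglob("*.json")):
        data = json.loads(path.read_text())
        path.write_text(json.dumps(normalize(data), indent=2) + "\n")

if __name__ == "__main__":
    main(sys.argv[1] if len(sys.argv) > 1 else "tests/integration/recordings")
```
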
commit 3f36bfaeaa (parent 6bcd3e25f2)
Ashwin Bharambe, 2025-10-03 17:26:11 -07:00, committed by GitHub
348 changed files with 10154 additions and 8329 deletions

@@ -22,7 +22,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:19.167396532Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -40,7 +40,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:19.362195218Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -58,7 +58,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:19.556896355Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -76,7 +76,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:19.752258848Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -94,7 +94,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:19.949688527Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -112,7 +112,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:20.145337065Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -130,7 +130,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:20.340739605Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -148,7 +148,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:20.539146761Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -166,7 +166,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:20.73590849Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -184,7 +184,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:20.930252877Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -202,7 +202,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:21.124432932Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -220,7 +220,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:21.332871735Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -238,7 +238,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:21.52851911Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -256,7 +256,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:21.724649778Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -274,7 +274,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:21.922353561Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -292,7 +292,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:22.117061137Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -310,7 +310,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:22.31230442Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -328,7 +328,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:22.506582272Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -346,15 +346,15 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
-"created_at": "2025-10-01T01:34:22.702819703Z",
+"created_at": "1970-01-01T00:00:00.000000Z",
"done": true,
"done_reason": "stop",
-"total_duration": 6447413112,
-"load_duration": 45664730,
+"total_duration": 0,
+"load_duration": 0,
"prompt_eval_count": 376,
-"prompt_eval_duration": 2864046437,
+"prompt_eval_duration": 0,
"eval_count": 19,
-"eval_duration": 3537012183,
+"eval_duration": 0,
"response": "",
"thinking": null,
"context": null