diff --git a/tests/integration/recordings/index.sqlite b/tests/integration/recordings/index.sqlite
index e01c8803a..7b6eb6a67 100644
Binary files a/tests/integration/recordings/index.sqlite and b/tests/integration/recordings/index.sqlite differ
diff --git a/tests/integration/recordings/responses/3c0bf9ba81b2.json b/tests/integration/recordings/responses/3c0bf9ba81b2.json
new file mode 100644
index 000000000..1b5f16c22
--- /dev/null
+++ b/tests/integration/recordings/responses/3c0bf9ba81b2.json
@@ -0,0 +1,56 @@
+{
+  "request": {
+    "method": "POST",
+    "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+    "headers": {},
+    "body": {
+      "model": "llama3.2:3b-instruct-fp16",
+      "messages": [
+        {
+          "role": "user",
+          "content": "Quick test"
+        }
+      ],
+      "max_tokens": 5
+    },
+    "endpoint": "/v1/chat/completions",
+    "model": "llama3.2:3b-instruct-fp16"
+  },
+  "response": {
+    "body": {
+      "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+      "__data__": {
+        "id": "chatcmpl-651",
+        "choices": [
+          {
+            "finish_reason": "length",
+            "index": 0,
+            "logprobs": null,
+            "message": {
+              "content": "I'm ready to help",
+              "refusal": null,
+              "role": "assistant",
+              "annotations": null,
+              "audio": null,
+              "function_call": null,
+              "tool_calls": null
+            }
+          }
+        ],
+        "created": 1755294941,
+        "model": "llama3.2:3b-instruct-fp16",
+        "object": "chat.completion",
+        "service_tier": null,
+        "system_fingerprint": "fp_ollama",
+        "usage": {
+          "completion_tokens": 5,
+          "prompt_tokens": 27,
+          "total_tokens": 32,
+          "completion_tokens_details": null,
+          "prompt_tokens_details": null
+        }
+      }
+    },
+    "is_streaming": false
+  }
+}
diff --git a/tests/integration/recordings/responses/44a1d9de0602.json b/tests/integration/recordings/responses/44a1d9de0602.json
new file mode 100644
index 000000000..2d158a06c
--- /dev/null
+++ b/tests/integration/recordings/responses/44a1d9de0602.json
@@ -0,0 +1,56 @@
+{
+  "request": {
+    "method": "POST",
+    "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+    "headers": {},
+    "body": {
+      "model": "llama3.2:3b-instruct-fp16",
+      "messages": [
+        {
+          "role": "user",
+          "content": "Say hello"
+        }
+      ],
+      "max_tokens": 20
+    },
+    "endpoint": "/v1/chat/completions",
+    "model": "llama3.2:3b-instruct-fp16"
+  },
+  "response": {
+    "body": {
+      "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+      "__data__": {
+        "id": "chatcmpl-987",
+        "choices": [
+          {
+            "finish_reason": "length",
+            "index": 0,
+            "logprobs": null,
+            "message": {
+              "content": "Hello! It's nice to meet you. \n\nIs there something I can help you with or would you",
+              "refusal": null,
+              "role": "assistant",
+              "annotations": null,
+              "audio": null,
+              "function_call": null,
+              "tool_calls": null
+            }
+          }
+        ],
+        "created": 1755294921,
+        "model": "llama3.2:3b-instruct-fp16",
+        "object": "chat.completion",
+        "service_tier": null,
+        "system_fingerprint": "fp_ollama",
+        "usage": {
+          "completion_tokens": 20,
+          "prompt_tokens": 27,
+          "total_tokens": 47,
+          "completion_tokens_details": null,
+          "prompt_tokens_details": null
+        }
+      }
+    },
+    "is_streaming": false
+  }
+}
diff --git a/tests/integration/recordings/responses/4a3a4447b16b.json b/tests/integration/recordings/responses/4a3a4447b16b.json
index ee1ee6d70..a31c583c7 100644
--- a/tests/integration/recordings/responses/4a3a4447b16b.json
+++ b/tests/integration/recordings/responses/4a3a4447b16b.json
@@ -14,7 +14,7 @@
         "models": [
           {
             "model": "nomic-embed-text:latest",
-            "modified_at": "2025-08-15T20:24:13.254634Z",
+            "modified_at": "2025-08-15T21:55:08.088554Z",
             "digest": "0a109f422b47e3a30ba2b10eca18548e944e8a23073ee3f3e947efcf3c45e59f",
             "size": 274302450,
             "details": {