From aeb9e4bafe7fa2e662cd1657c82fb48b42b50bf3 Mon Sep 17 00:00:00 2001
From: Swapna Lekkala
Date: Mon, 13 Oct 2025 13:55:04 -0700
Subject: [PATCH] add recordings

---
 ...a09616be69ff06cb5e962ed9cdb7c52c4f042.json | 59 +++++++++++++++++++
 ...34a95f56931b792d5939f4cebc57-a282c3d2.json | 35 +++++++++++
 2 files changed, 94 insertions(+)
 create mode 100644 tests/integration/agents/recordings/c7446b903135a28b57098ca75e8a09616be69ff06cb5e962ed9cdb7c52c4f042.json
 create mode 100644 tests/integration/common/recordings/models-64a2277c90f0f42576f60c1030e3a020403d34a95f56931b792d5939f4cebc57-a282c3d2.json

diff --git a/tests/integration/agents/recordings/c7446b903135a28b57098ca75e8a09616be69ff06cb5e962ed9cdb7c52c4f042.json b/tests/integration/agents/recordings/c7446b903135a28b57098ca75e8a09616be69ff06cb5e962ed9cdb7c52c4f042.json
new file mode 100644
index 000000000..6acd5328c
--- /dev/null
+++ b/tests/integration/agents/recordings/c7446b903135a28b57098ca75e8a09616be69ff06cb5e962ed9cdb7c52c4f042.json
@@ -0,0 +1,59 @@
+{
+  "test_id": "tests/integration/agents/test_openai_responses.py::test_guardrails_with_tools[openai_client-txt=ollama/llama3.2:3b-instruct-fp16]",
+  "request": {
+    "method": "POST",
+    "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+    "headers": {},
+    "body": {
+      "model": "llama-guard3:1b",
+      "messages": [
+        {
+          "role": "user",
+          "content": "Task: Check if there is unsafe content in 'User' messages in conversations according our safety policy with the below categories.\n\n<BEGIN UNSAFE CONTENT CATEGORIES>\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n<END UNSAFE CONTENT CATEGORIES>\n\n<BEGIN CONVERSATION>\n\nUser: What's the weather like? Please help me in a safe and appropriate way.\n\n<END CONVERSATION>\n\nProvide your safety assessment for ONLY THE LAST User message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories."
+        }
+      ],
+      "stream": false,
+      "temperature": 0.0
+    },
+    "endpoint": "/v1/chat/completions",
+    "model": "llama-guard3:1b"
+  },
+  "response": {
+    "body": {
+      "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+      "__data__": {
+        "id": "rec-c7446b903135",
+        "choices": [
+          {
+            "finish_reason": "stop",
+            "index": 0,
+            "logprobs": null,
+            "message": {
+              "content": "safe",
+              "refusal": null,
+              "role": "assistant",
+              "annotations": null,
+              "audio": null,
+              "function_call": null,
+              "tool_calls": null
+            }
+          }
+        ],
+        "created": 0,
+        "model": "llama-guard3:1b",
+        "object": "chat.completion",
+        "service_tier": null,
+        "system_fingerprint": "fp_ollama",
+        "usage": {
+          "completion_tokens": 2,
+          "prompt_tokens": 399,
+          "total_tokens": 401,
+          "completion_tokens_details": null,
+          "prompt_tokens_details": null
+        }
+      }
+    },
+    "is_streaming": false
+  },
+  "id_normalization_mapping": {}
+}
diff --git a/tests/integration/common/recordings/models-64a2277c90f0f42576f60c1030e3a020403d34a95f56931b792d5939f4cebc57-a282c3d2.json b/tests/integration/common/recordings/models-64a2277c90f0f42576f60c1030e3a020403d34a95f56931b792d5939f4cebc57-a282c3d2.json
new file mode 100644
index 000000000..14211fc18
--- /dev/null
+++ b/tests/integration/common/recordings/models-64a2277c90f0f42576f60c1030e3a020403d34a95f56931b792d5939f4cebc57-a282c3d2.json
@@ -0,0 +1,35 @@
+{
+  "test_id": null,
+  "request": {
+    "method": "POST",
+    "url": "http://0.0.0.0:11434/v1/v1/models",
+    "headers": {},
+    "body": {},
+    "endpoint": "/v1/models",
+    "model": ""
+  },
+  "response": {
+    "body": [
+      {
+        "__type__": "openai.types.model.Model",
+        "__data__": {
+          "id": "llama3.2:3b-instruct-fp16",
+          "created": 1760378164,
+          "object": "model",
+          "owned_by": "library"
+        }
+      },
+      {
+        "__type__": "openai.types.model.Model",
+        "__data__": {
+          "id": "llama-guard3:1b",
+          "created": 1760377649,
+          "object": "model",
+          "owned_by": "library"
+        }
+      }
+    ],
+    "is_streaming": false
+  },
+  "id_normalization_mapping": {}
+}
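
Both files share the same recording layout: a "request" object (method, URL, endpoint, body) captured against the local Ollama server, paired with the "response" body that was returned at recording time, plus an "is_streaming" flag and an "id_normalization_mapping". The Python sketch below illustrates, under assumptions, how a replay harness could serve one of these recordings instead of calling Ollama; the field names mirror the JSON added in this patch, while RECORDINGS_DIR, the matching rule, and the function name are hypothetical and not taken from the project's actual implementation.

    import json
    from pathlib import Path

    # Hypothetical replay lookup for the recording files added above.
    # The "request"/"response"/"is_streaming" keys come from this patch;
    # the directory, matching rule, and function name are assumptions.
    RECORDINGS_DIR = Path("tests/integration/agents/recordings")

    def replay_response(method: str, endpoint: str, body: dict) -> dict | None:
        """Return the stored response body for a matching recording, or None."""
        for path in RECORDINGS_DIR.glob("*.json"):
            recording = json.loads(path.read_text())
            req = recording["request"]
            if (
                req["method"] == method
                and req["endpoint"] == endpoint
                and req["body"] == body
                and not recording["response"]["is_streaming"]
            ):
                return recording["response"]["body"]
        return None

    # Example: the llama-guard3:1b moderation call recorded above would then be
    # answered from disk, so the guardrails test can run without a live server.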