From 19d25fc3f7a0bbed10436ba1f32108a6bfbd7c41 Mon Sep 17 00:00:00 2001
From: Ashwin Bharambe
Date: Wed, 1 Oct 2025 20:16:02 -0700
Subject: [PATCH] moar recordings and test fixes

---
 llama_stack/apis/tools/tools.py | 2 +-
 .../meta_reference/responses/streaming.py | 4 +-
 .../recordings/responses/0396786db779.json | 366 +++++
 .../recordings/responses/04cb9de29e06.json | 366 +++++
 .../recordings/responses/05e3ebc68306.json | 4 +-
 .../recordings/responses/0e8f2b001dd9.json | 10 +-
 .../recordings/responses/0fad19b9d308.json | 12 +-
 .../recordings/responses/1a4da7c94fde.json | 4 +-
 .../recordings/responses/1b939935d483.json | 258 ++++
 .../recordings/responses/239f4768f5aa.json | 10 +-
 .../recordings/responses/2d187a11704c.json | 208 +--
 .../recordings/responses/325a72db5755.json | 80 +-
 .../recordings/responses/36badd90238f.json | 366 +++++
 .../recordings/responses/37706c1729ba.json | 4 +-
 .../recordings/responses/38ea441b5f83.json | 10 +-
 .../recordings/responses/3a81146f2afa.json | 1178 ++++++++---------
 .../recordings/responses/3ca695048bee.json | 10 +-
 .../recordings/responses/3f5871e0805d.json | 85 ++
 .../recordings/responses/4283d7199d9b.json | 366 +++++
 .../recordings/responses/4ebcaf6c2aee.json | 400 +++++-
 .../recordings/responses/55ae40168378.json | 366 +++++
 .../recordings/responses/63aa4590a38a.json | 768 +++++------
 .../recordings/responses/6412295819a1.json | 10 +-
 .../recordings/responses/65c12de0a1db.json | 10 +-
 .../recordings/responses/6b3e593ad9b8.json | 4 +-
 .../recordings/responses/6f96090aa955.json | 126 +-
 .../recordings/responses/7a047bcf8b19.json | 4 +-
 .../recordings/responses/80e4404d8987.json | 28 +-
 .../recordings/responses/8486e5b1c6db.json | 276 ++++
 .../recordings/responses/84fc473e7b29.json | 4 +-
 .../recordings/responses/87577729d812.json | 4 +-
 .../recordings/responses/931ac7158789.json | 10 +-
 .../recordings/responses/a0c4df33879f.json | 46 +-
 .../recordings/responses/a46b77ffd494.json | 4 +-
 .../recordings/responses/a4c8d19bb1eb.json | 4 +-
 .../recordings/responses/a689181d64d3.json | 86 ++
 .../recordings/responses/b28f75bd87dc.json | 4 +-
 .../recordings/responses/b374fc18c641.json | 258 ++++
 .../recordings/responses/c2ac76cbf66d.json | 4 +-
 .../recordings/responses/c3dbccc5de74.json | 10 +-
 .../recordings/responses/c4991de37dfb.json | 10 +-
 .../recordings/responses/c8234a1171f3.json | 4 +-
 .../recordings/responses/c8e196049fe4.json | 4 +-
 .../recordings/responses/cd0ece88d392.json | 258 ++++
 .../recordings/responses/cd294c2e0038.json | 4 +-
 .../recordings/responses/cf776b1aa432.json | 32 +-
 .../recordings/responses/d0ac68cbde69.json | 20 +-
 .../recordings/responses/d7caf68e394e.json | 4 +-
 .../recordings/responses/d9e8f66e1d85.json | 12 +-
 .../recordings/responses/df20f4b62da7.json | 258 ++++
 .../recordings/responses/e0c71820f395.json | 24 +-
 .../recordings/responses/e9c8a0e4f0e0.json | 4 +-
 .../recordings/responses/f23defea82ec.json | 400 +++++-
 .../recordings/responses/f70f30f54211.json | 10 +-
 .../recordings/responses/f8ba05a5ce61.json | 402 ++++++
 .../recordings/responses/feae037e2abd.json | 258 ++++
 .../routers/test_routing_tables.py | 12 +-
 .../agent/test_meta_reference_agent.py | 46 +-
 .../meta_reference/test_openai_responses.py | 20 +-
 .../responses/test_streaming.py | 22 +-
 60 files changed, 6163 insertions(+), 1410 deletions(-)
 create mode 100644 tests/integration/recordings/responses/0396786db779.json
 create mode 100644 tests/integration/recordings/responses/04cb9de29e06.json
 create mode 100644
tests/integration/recordings/responses/1b939935d483.json create mode 100644 tests/integration/recordings/responses/36badd90238f.json create mode 100644 tests/integration/recordings/responses/3f5871e0805d.json create mode 100644 tests/integration/recordings/responses/4283d7199d9b.json create mode 100644 tests/integration/recordings/responses/55ae40168378.json create mode 100644 tests/integration/recordings/responses/8486e5b1c6db.json create mode 100644 tests/integration/recordings/responses/a689181d64d3.json create mode 100644 tests/integration/recordings/responses/b374fc18c641.json create mode 100644 tests/integration/recordings/responses/cd0ece88d392.json create mode 100644 tests/integration/recordings/responses/df20f4b62da7.json create mode 100644 tests/integration/recordings/responses/f8ba05a5ce61.json create mode 100644 tests/integration/recordings/responses/feae037e2abd.json diff --git a/llama_stack/apis/tools/tools.py b/llama_stack/apis/tools/tools.py index 7a2027ade..37d7f92c9 100644 --- a/llama_stack/apis/tools/tools.py +++ b/llama_stack/apis/tools/tools.py @@ -31,7 +31,7 @@ class ToolDef(BaseModel): :param toolgroup_id: (Optional) ID of the tool group this tool belongs to """ - toolgroup_id: str | None = None + toolgroup_id: str | None = None name: str description: str | None = None input_schema: dict[str, Any] | None = None diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py index cb9a1bd8d..9f425e8b5 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py @@ -516,11 +516,11 @@ class StreamingResponseOrchestrator: """Process all tools and emit appropriate streaming events.""" from openai.types.chat import ChatCompletionToolParam - from llama_stack.apis.tools import Tool + from llama_stack.apis.tools import ToolDef from llama_stack.models.llama.datatypes import ToolDefinition from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool - def make_openai_tool(tool_name: str, tool: Tool) -> ChatCompletionToolParam: + def make_openai_tool(tool_name: str, tool: ToolDef) -> ChatCompletionToolParam: tool_def = ToolDefinition( tool_name=tool_name, description=tool.description, diff --git a/tests/integration/recordings/responses/0396786db779.json b/tests/integration/recordings/responses/0396786db779.json new file mode 100644 index 000000000..e2d40c100 --- /dev/null +++ b/tests/integration/recordings/responses/0396786db779.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant Always respond with tool calls no matter what. <|eot_id|><|start_header_id|>user<|end_header_id|>\n\nGet the boiling point of polyjuice with a tool call.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.228595Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.272966Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.315637Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.356564Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.397939Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.438829Z", + "done": false, + "done_reason": null, + 
"total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.479679Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.520682Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.56207Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.603054Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.644749Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.685399Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.7267Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.77062Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + 
"thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.813947Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.854591Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.896278Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.937449Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.979031Z", + "done": true, + "done_reason": "stop", + "total_duration": 944600833, + "load_duration": 83227667, + "prompt_eval_count": 369, + "prompt_eval_duration": 109699916, + "eval_count": 19, + "eval_duration": 751096500, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/04cb9de29e06.json b/tests/integration/recordings/responses/04cb9de29e06.json new file mode 100644 index 000000000..0fdc6f8b9 --- /dev/null +++ b/tests/integration/recordings/responses/04cb9de29e06.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use the tool `get_boiling_point` to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.682181Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.728326Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.775162Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.820267Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.864362Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.906797Z", + "done": 
false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.950158Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.992796Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.034691Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.07709Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.119534Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.161661Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.204749Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.247334Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + 
"eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.29011Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.331776Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.374076Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.416672Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.458519Z", + "done": true, + "done_reason": "stop", + "total_duration": 1437962792, + "load_duration": 129009042, + "prompt_eval_count": 379, + "prompt_eval_duration": 530416042, + "eval_count": 19, + "eval_duration": 777491375, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/05e3ebc68306.json b/tests/integration/recordings/responses/05e3ebc68306.json index 53b7c8a89..b921d36a0 100644 --- a/tests/integration/recordings/responses/05e3ebc68306.json +++ b/tests/integration/recordings/responses/05e3ebc68306.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-618", + "id": "chatcmpl-97", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245078, + "created": 1759373697, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/0e8f2b001dd9.json b/tests/integration/recordings/responses/0e8f2b001dd9.json index 24ea5322c..35fb6c921 100644 --- a/tests/integration/recordings/responses/0e8f2b001dd9.json +++ b/tests/integration/recordings/responses/0e8f2b001dd9.json @@ -20,14 +20,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-494", + "id": "chatcmpl-508", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "You're referring to Saturn's majestic ring system. Yes, the answer is Saturn! 
Its ring system is one of the most well-known and iconic in our solar system. The rings are made up of ice particles and rock debris that orbit around Saturn, stretching out for hundreds of thousands of miles.", + "content": "The planet Saturn has distinct ring systems, but the answer you're looking for is likely \"Saturn\". However, another notable ring system exists around the planet Saturn's moon, Titan, however Titan does have significant ring structure. One Ring system belonging to another planet that has a name starting with letter S is - Saturns sister- The other possible answer would be- SATURN'S OTHER RINGED PLANET: Saturn", "refusal": null, "role": "assistant", "annotations": null, @@ -37,15 +37,15 @@ } } ], - "created": 1759351471, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 60, + "completion_tokens": 87, "prompt_tokens": 39, - "total_tokens": 99, + "total_tokens": 126, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/0fad19b9d308.json b/tests/integration/recordings/responses/0fad19b9d308.json index 2e92cc179..dcb6ee055 100644 --- a/tests/integration/recordings/responses/0fad19b9d308.json +++ b/tests/integration/recordings/responses/0fad19b9d308.json @@ -38,7 +38,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-187", + "id": "chatcmpl-232", "choices": [ { "delta": { @@ -49,9 +49,9 @@ "tool_calls": [ { "index": 0, - "id": "call_nng2lhyy", + "id": "call_1hqty7lo", "function": { - "arguments": "{\"timezone\":\"UTC\"}", + "arguments": "{\"timezone\":\"\",\"value\":\"UTC\"}", "name": "get_time" }, "type": "function" @@ -63,7 +63,7 @@ "logprobs": null } ], - "created": 1759351462, + "created": 1759376623, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -74,7 +74,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-187", + "id": "chatcmpl-232", "choices": [ { "delta": { @@ -89,7 +89,7 @@ "logprobs": null } ], - "created": 1759351462, + "created": 1759376623, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/1a4da7c94fde.json b/tests/integration/recordings/responses/1a4da7c94fde.json index 4b3fb8fb6..0f5734bd9 100644 --- a/tests/integration/recordings/responses/1a4da7c94fde.json +++ b/tests/integration/recordings/responses/1a4da7c94fde.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-438", + "id": "chatcmpl-466", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245073, + "created": 1759373692, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/1b939935d483.json b/tests/integration/recordings/responses/1b939935d483.json new file mode 100644 index 000000000..1eed51400 --- /dev/null +++ b/tests/integration/recordings/responses/1b939935d483.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful 
assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use one of the provided functions/tools to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:01.957108Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:01.998746Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.040281Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.081567Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + 
"prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.122945Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.16406Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.205051Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.246393Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.288195Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.331557Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.373397Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.414856Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { 
+ "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.456059Z", + "done": true, + "done_reason": "stop", + "total_duration": 669686292, + "load_duration": 96788459, + "prompt_eval_count": 408, + "prompt_eval_duration": 72865250, + "eval_count": 13, + "eval_duration": 499470042, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/239f4768f5aa.json b/tests/integration/recordings/responses/239f4768f5aa.json index ce540db3f..38f483090 100644 --- a/tests/integration/recordings/responses/239f4768f5aa.json +++ b/tests/integration/recordings/responses/239f4768f5aa.json @@ -53,14 +53,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-433", + "id": "chatcmpl-497", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "{\"first_name\": \"Michael\", \"last_name\": \"Jordan\", \"year_of_birth\": 1963}\n\n \t\t\t\t\t\t\t\t\t\t\t \t\t ", + "content": "{\"first_name\": \"Michael\", \"last_name\": \"Jordan\", \"year_of_birth\": 1963}", "refusal": null, "role": "assistant", "annotations": null, @@ -70,15 +70,15 @@ } } ], - "created": 1758979490, + "created": 1759376618, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 31, + "completion_tokens": 26, "prompt_tokens": 60, - "total_tokens": 91, + "total_tokens": 86, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/2d187a11704c.json b/tests/integration/recordings/responses/2d187a11704c.json index c0f746ffe..0c12271fd 100644 --- a/tests/integration/recordings/responses/2d187a11704c.json +++ b/tests/integration/recordings/responses/2d187a11704c.json @@ -22,7 +22,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.566151Z", + "created_at": "2025-10-02T02:55:03.175181Z", "done": false, "done_reason": null, "total_duration": null, @@ -40,7 +40,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.609308Z", + "created_at": "2025-10-02T02:55:03.21666Z", "done": false, "done_reason": null, "total_duration": null, @@ -58,7 +58,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.651314Z", + "created_at": "2025-10-02T02:55:03.258841Z", "done": false, "done_reason": null, "total_duration": null, @@ -76,7 +76,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.693185Z", + "created_at": "2025-10-02T02:55:03.299188Z", "done": false, "done_reason": null, "total_duration": null, @@ -94,7 +94,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.734643Z", + "created_at": "2025-10-02T02:55:03.339415Z", "done": false, "done_reason": null, "total_duration": null, @@ -112,7 +112,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.776343Z", + "created_at": "2025-10-02T02:55:03.379794Z", "done": false, 
"done_reason": null, "total_duration": null, @@ -130,7 +130,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.81705Z", + "created_at": "2025-10-02T02:55:03.420354Z", "done": false, "done_reason": null, "total_duration": null, @@ -148,7 +148,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.857959Z", + "created_at": "2025-10-02T02:55:03.460933Z", "done": false, "done_reason": null, "total_duration": null, @@ -166,7 +166,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.899424Z", + "created_at": "2025-10-02T02:55:03.501777Z", "done": false, "done_reason": null, "total_duration": null, @@ -184,7 +184,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.939218Z", + "created_at": "2025-10-02T02:55:03.542402Z", "done": false, "done_reason": null, "total_duration": null, @@ -202,7 +202,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:56.980065Z", + "created_at": "2025-10-02T02:55:03.582816Z", "done": false, "done_reason": null, "total_duration": null, @@ -220,7 +220,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.02214Z", + "created_at": "2025-10-02T02:55:03.623108Z", "done": false, "done_reason": null, "total_duration": null, @@ -238,7 +238,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.0628Z", + "created_at": "2025-10-02T02:55:03.663532Z", "done": false, "done_reason": null, "total_duration": null, @@ -256,7 +256,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.106061Z", + "created_at": "2025-10-02T02:55:03.704651Z", "done": false, "done_reason": null, "total_duration": null, @@ -274,7 +274,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.1492Z", + "created_at": "2025-10-02T02:55:03.746321Z", "done": false, "done_reason": null, "total_duration": null, @@ -292,7 +292,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.190075Z", + "created_at": "2025-10-02T02:55:03.787213Z", "done": false, "done_reason": null, "total_duration": null, @@ -310,7 +310,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.23178Z", + "created_at": "2025-10-02T02:55:03.829153Z", "done": false, "done_reason": null, "total_duration": null, @@ -328,7 +328,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.272738Z", + "created_at": "2025-10-02T02:55:03.869545Z", "done": false, "done_reason": null, "total_duration": null, @@ -346,7 +346,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.313855Z", + "created_at": "2025-10-02T02:55:03.909839Z", "done": false, "done_reason": null, 
"total_duration": null, @@ -364,7 +364,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.354964Z", + "created_at": "2025-10-02T02:55:03.950296Z", "done": false, "done_reason": null, "total_duration": null, @@ -382,7 +382,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.395971Z", + "created_at": "2025-10-02T02:55:03.990725Z", "done": false, "done_reason": null, "total_duration": null, @@ -400,7 +400,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.438471Z", + "created_at": "2025-10-02T02:55:04.031037Z", "done": false, "done_reason": null, "total_duration": null, @@ -418,7 +418,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.479796Z", + "created_at": "2025-10-02T02:55:04.071398Z", "done": false, "done_reason": null, "total_duration": null, @@ -436,7 +436,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.520641Z", + "created_at": "2025-10-02T02:55:04.111908Z", "done": false, "done_reason": null, "total_duration": null, @@ -454,7 +454,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.561511Z", + "created_at": "2025-10-02T02:55:04.153461Z", "done": false, "done_reason": null, "total_duration": null, @@ -472,7 +472,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.602875Z", + "created_at": "2025-10-02T02:55:04.195941Z", "done": false, "done_reason": null, "total_duration": null, @@ -490,7 +490,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.643406Z", + "created_at": "2025-10-02T02:55:04.236433Z", "done": false, "done_reason": null, "total_duration": null, @@ -508,7 +508,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.684279Z", + "created_at": "2025-10-02T02:55:04.27718Z", "done": false, "done_reason": null, "total_duration": null, @@ -526,7 +526,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.725699Z", + "created_at": "2025-10-02T02:55:04.317743Z", "done": false, "done_reason": null, "total_duration": null, @@ -544,7 +544,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.766658Z", + "created_at": "2025-10-02T02:55:04.358602Z", "done": false, "done_reason": null, "total_duration": null, @@ -562,7 +562,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.80738Z", + "created_at": "2025-10-02T02:55:04.399212Z", "done": false, "done_reason": null, "total_duration": null, @@ -580,7 +580,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.848466Z", + "created_at": "2025-10-02T02:55:04.439733Z", "done": false, "done_reason": null, "total_duration": 
null, @@ -598,7 +598,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.889056Z", + "created_at": "2025-10-02T02:55:04.480639Z", "done": false, "done_reason": null, "total_duration": null, @@ -616,7 +616,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.931554Z", + "created_at": "2025-10-02T02:55:04.521251Z", "done": false, "done_reason": null, "total_duration": null, @@ -634,7 +634,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:57.974754Z", + "created_at": "2025-10-02T02:55:04.56195Z", "done": false, "done_reason": null, "total_duration": null, @@ -652,7 +652,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.016978Z", + "created_at": "2025-10-02T02:55:04.60257Z", "done": false, "done_reason": null, "total_duration": null, @@ -670,7 +670,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.057942Z", + "created_at": "2025-10-02T02:55:04.643071Z", "done": false, "done_reason": null, "total_duration": null, @@ -688,7 +688,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.099015Z", + "created_at": "2025-10-02T02:55:04.684195Z", "done": false, "done_reason": null, "total_duration": null, @@ -706,7 +706,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.140531Z", + "created_at": "2025-10-02T02:55:04.725008Z", "done": false, "done_reason": null, "total_duration": null, @@ -724,7 +724,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.181382Z", + "created_at": "2025-10-02T02:55:04.766299Z", "done": false, "done_reason": null, "total_duration": null, @@ -742,7 +742,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.223318Z", + "created_at": "2025-10-02T02:55:04.807076Z", "done": false, "done_reason": null, "total_duration": null, @@ -760,7 +760,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.26358Z", + "created_at": "2025-10-02T02:55:04.848963Z", "done": false, "done_reason": null, "total_duration": null, @@ -778,7 +778,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.305496Z", + "created_at": "2025-10-02T02:55:04.889928Z", "done": false, "done_reason": null, "total_duration": null, @@ -796,7 +796,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.347254Z", + "created_at": "2025-10-02T02:55:04.934326Z", "done": false, "done_reason": null, "total_duration": null, @@ -814,7 +814,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.390044Z", + "created_at": "2025-10-02T02:55:04.977276Z", "done": false, "done_reason": null, "total_duration": null, @@ -832,7 
+832,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.430867Z", + "created_at": "2025-10-02T02:55:05.020601Z", "done": false, "done_reason": null, "total_duration": null, @@ -850,7 +850,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.471376Z", + "created_at": "2025-10-02T02:55:05.063018Z", "done": false, "done_reason": null, "total_duration": null, @@ -868,7 +868,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.51208Z", + "created_at": "2025-10-02T02:55:05.104224Z", "done": false, "done_reason": null, "total_duration": null, @@ -886,7 +886,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.553226Z", + "created_at": "2025-10-02T02:55:05.144777Z", "done": false, "done_reason": null, "total_duration": null, @@ -904,7 +904,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.594787Z", + "created_at": "2025-10-02T02:55:05.184974Z", "done": false, "done_reason": null, "total_duration": null, @@ -922,7 +922,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.63466Z", + "created_at": "2025-10-02T02:55:05.225424Z", "done": false, "done_reason": null, "total_duration": null, @@ -940,7 +940,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.674628Z", + "created_at": "2025-10-02T02:55:05.2659Z", "done": false, "done_reason": null, "total_duration": null, @@ -958,7 +958,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.714616Z", + "created_at": "2025-10-02T02:55:05.306482Z", "done": false, "done_reason": null, "total_duration": null, @@ -976,7 +976,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.754906Z", + "created_at": "2025-10-02T02:55:05.346838Z", "done": false, "done_reason": null, "total_duration": null, @@ -994,7 +994,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.795048Z", + "created_at": "2025-10-02T02:55:05.387059Z", "done": false, "done_reason": null, "total_duration": null, @@ -1012,7 +1012,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.835297Z", + "created_at": "2025-10-02T02:55:05.427541Z", "done": false, "done_reason": null, "total_duration": null, @@ -1030,7 +1030,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.875738Z", + "created_at": "2025-10-02T02:55:05.467788Z", "done": false, "done_reason": null, "total_duration": null, @@ -1048,7 +1048,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.91604Z", + "created_at": "2025-10-02T02:55:05.508102Z", "done": false, "done_reason": null, "total_duration": null, @@ -1066,7 +1066,7 @@ 
"__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.956596Z", + "created_at": "2025-10-02T02:55:05.548521Z", "done": false, "done_reason": null, "total_duration": null, @@ -1084,7 +1084,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:58.996664Z", + "created_at": "2025-10-02T02:55:05.588742Z", "done": false, "done_reason": null, "total_duration": null, @@ -1102,7 +1102,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.037796Z", + "created_at": "2025-10-02T02:55:05.629266Z", "done": false, "done_reason": null, "total_duration": null, @@ -1120,7 +1120,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.078586Z", + "created_at": "2025-10-02T02:55:05.674214Z", "done": false, "done_reason": null, "total_duration": null, @@ -1138,7 +1138,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.119448Z", + "created_at": "2025-10-02T02:55:05.71804Z", "done": false, "done_reason": null, "total_duration": null, @@ -1156,7 +1156,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.160318Z", + "created_at": "2025-10-02T02:55:05.761666Z", "done": false, "done_reason": null, "total_duration": null, @@ -1174,7 +1174,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.201852Z", + "created_at": "2025-10-02T02:55:05.80432Z", "done": false, "done_reason": null, "total_duration": null, @@ -1192,7 +1192,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.243763Z", + "created_at": "2025-10-02T02:55:05.846217Z", "done": false, "done_reason": null, "total_duration": null, @@ -1210,7 +1210,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.284948Z", + "created_at": "2025-10-02T02:55:05.88931Z", "done": false, "done_reason": null, "total_duration": null, @@ -1228,7 +1228,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.325598Z", + "created_at": "2025-10-02T02:55:05.93282Z", "done": false, "done_reason": null, "total_duration": null, @@ -1246,7 +1246,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.366289Z", + "created_at": "2025-10-02T02:55:05.976513Z", "done": false, "done_reason": null, "total_duration": null, @@ -1264,7 +1264,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.406764Z", + "created_at": "2025-10-02T02:55:06.020886Z", "done": false, "done_reason": null, "total_duration": null, @@ -1282,7 +1282,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.447922Z", + "created_at": "2025-10-02T02:55:06.063597Z", "done": false, "done_reason": null, "total_duration": null, @@ -1300,7 +1300,7 
@@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.488486Z", + "created_at": "2025-10-02T02:55:06.106054Z", "done": false, "done_reason": null, "total_duration": null, @@ -1318,7 +1318,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.529Z", + "created_at": "2025-10-02T02:55:06.148232Z", "done": false, "done_reason": null, "total_duration": null, @@ -1336,7 +1336,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.569417Z", + "created_at": "2025-10-02T02:55:06.190334Z", "done": false, "done_reason": null, "total_duration": null, @@ -1354,7 +1354,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.610542Z", + "created_at": "2025-10-02T02:55:06.231933Z", "done": false, "done_reason": null, "total_duration": null, @@ -1372,7 +1372,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.651411Z", + "created_at": "2025-10-02T02:55:06.27373Z", "done": false, "done_reason": null, "total_duration": null, @@ -1390,7 +1390,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.69241Z", + "created_at": "2025-10-02T02:55:06.315435Z", "done": false, "done_reason": null, "total_duration": null, @@ -1408,7 +1408,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.732339Z", + "created_at": "2025-10-02T02:55:06.35848Z", "done": false, "done_reason": null, "total_duration": null, @@ -1426,7 +1426,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.772462Z", + "created_at": "2025-10-02T02:55:06.400959Z", "done": false, "done_reason": null, "total_duration": null, @@ -1444,7 +1444,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.812507Z", + "created_at": "2025-10-02T02:55:06.441214Z", "done": false, "done_reason": null, "total_duration": null, @@ -1462,7 +1462,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.852762Z", + "created_at": "2025-10-02T02:55:06.481409Z", "done": false, "done_reason": null, "total_duration": null, @@ -1480,7 +1480,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.892984Z", + "created_at": "2025-10-02T02:55:06.522518Z", "done": false, "done_reason": null, "total_duration": null, @@ -1498,7 +1498,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.933555Z", + "created_at": "2025-10-02T02:55:06.564666Z", "done": false, "done_reason": null, "total_duration": null, @@ -1516,7 +1516,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:59.973778Z", + "created_at": "2025-10-02T02:55:06.605895Z", "done": false, "done_reason": null, "total_duration": null, @@ -1534,7 
+1534,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.014923Z", + "created_at": "2025-10-02T02:55:06.646978Z", "done": false, "done_reason": null, "total_duration": null, @@ -1552,7 +1552,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.057464Z", + "created_at": "2025-10-02T02:55:06.68904Z", "done": false, "done_reason": null, "total_duration": null, @@ -1570,7 +1570,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.09902Z", + "created_at": "2025-10-02T02:55:06.730173Z", "done": false, "done_reason": null, "total_duration": null, @@ -1588,7 +1588,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.140492Z", + "created_at": "2025-10-02T02:55:06.772861Z", "done": false, "done_reason": null, "total_duration": null, @@ -1606,7 +1606,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.180239Z", + "created_at": "2025-10-02T02:55:06.816599Z", "done": false, "done_reason": null, "total_duration": null, @@ -1624,7 +1624,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.220364Z", + "created_at": "2025-10-02T02:55:06.859503Z", "done": false, "done_reason": null, "total_duration": null, @@ -1642,7 +1642,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.26097Z", + "created_at": "2025-10-02T02:55:06.901146Z", "done": false, "done_reason": null, "total_duration": null, @@ -1660,7 +1660,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.301228Z", + "created_at": "2025-10-02T02:55:06.943698Z", "done": false, "done_reason": null, "total_duration": null, @@ -1678,7 +1678,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.341631Z", + "created_at": "2025-10-02T02:55:06.985619Z", "done": false, "done_reason": null, "total_duration": null, @@ -1696,7 +1696,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.383006Z", + "created_at": "2025-10-02T02:55:07.027092Z", "done": false, "done_reason": null, "total_duration": null, @@ -1714,7 +1714,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.423509Z", + "created_at": "2025-10-02T02:55:07.068654Z", "done": false, "done_reason": null, "total_duration": null, @@ -1732,7 +1732,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.464702Z", + "created_at": "2025-10-02T02:55:07.109785Z", "done": false, "done_reason": null, "total_duration": null, @@ -1750,7 +1750,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.505914Z", + "created_at": "2025-10-02T02:55:07.151491Z", "done": false, "done_reason": null, "total_duration": null, @@ 
-1768,7 +1768,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.546505Z", + "created_at": "2025-10-02T02:55:07.192762Z", "done": false, "done_reason": null, "total_duration": null, @@ -1786,7 +1786,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.587839Z", + "created_at": "2025-10-02T02:55:07.2337Z", "done": false, "done_reason": null, "total_duration": null, @@ -1804,15 +1804,15 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:38:00.629018Z", + "created_at": "2025-10-02T02:55:07.276074Z", "done": true, "done_reason": "stop", - "total_duration": 4303339291, - "load_duration": 156231250, + "total_duration": 4260353875, + "load_duration": 95584041, "prompt_eval_count": 36, - "prompt_eval_duration": 81909875, + "prompt_eval_duration": 62641958, "eval_count": 100, - "eval_duration": 4064559292, + "eval_duration": 4101499250, "response": "", "thinking": null, "context": null diff --git a/tests/integration/recordings/responses/325a72db5755.json b/tests/integration/recordings/responses/325a72db5755.json index 7bf2640d5..518d5b8c9 100644 --- a/tests/integration/recordings/responses/325a72db5755.json +++ b/tests/integration/recordings/responses/325a72db5755.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,7 +73,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -99,7 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -114,7 +114,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -125,7 +125,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -140,7 +140,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -151,7 +151,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -166,7 +166,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 
1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -177,7 +177,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -192,7 +192,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -203,7 +203,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -218,7 +218,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -229,7 +229,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -244,7 +244,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376639, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -255,7 +255,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -270,7 +270,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -281,7 +281,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -296,7 +296,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -307,7 +307,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -322,7 +322,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -333,7 +333,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -348,7 +348,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -359,7 +359,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -374,7 +374,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -385,7 +385,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -400,7 +400,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -411,7 +411,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", 
"__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -426,7 +426,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -437,7 +437,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -452,7 +452,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -463,7 +463,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -478,7 +478,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -489,7 +489,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -504,7 +504,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -515,7 +515,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-256", + "id": "chatcmpl-822", "choices": [ { "delta": { @@ -530,7 +530,7 @@ "logprobs": null } ], - "created": 1759351472, + "created": 1759376640, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/36badd90238f.json b/tests/integration/recordings/responses/36badd90238f.json new file mode 100644 index 000000000..c3760805b --- /dev/null +++ b/tests/integration/recordings/responses/36badd90238f.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.266524Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.307779Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.349588Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.392007Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.435225Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.47687Z", + "done": false, + "done_reason": null, + "total_duration": 
null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.518854Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.560093Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.601376Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.642613Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.686473Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.728965Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.770498Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.812614Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": 
null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.854407Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.896933Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.938059Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.980332Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.021812Z", + "done": true, + "done_reason": "stop", + "total_duration": 900445208, + "load_duration": 78206917, + "prompt_eval_count": 364, + "prompt_eval_duration": 65645917, + "eval_count": 19, + "eval_duration": 755986375, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/37706c1729ba.json b/tests/integration/recordings/responses/37706c1729ba.json index 256e0c37e..f7abee0e3 100644 --- a/tests/integration/recordings/responses/37706c1729ba.json +++ b/tests/integration/recordings/responses/37706c1729ba.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-316", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245080, + "created": 1759373698, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/38ea441b5f83.json b/tests/integration/recordings/responses/38ea441b5f83.json index 79886b389..03229846b 100644 --- a/tests/integration/recordings/responses/38ea441b5f83.json +++ b/tests/integration/recordings/responses/38ea441b5f83.json @@ -46,7 +46,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-761", + "id": "chatcmpl-236", "choices": [ { "finish_reason": "tool_calls", @@ -61,7 +61,7 @@ "function_call": null, "tool_calls": [ { - "id": "call_cj8ownwc", + "id": "call_u4ydewqv", "function": { "arguments": "{\"location\":\"San Francisco, CA\"}", "name": "get_weather" @@ -73,15 +73,15 @@ } } ], - "created": 
1758975113, + "created": 1759376610, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 18, + "completion_tokens": 20, "prompt_tokens": 185, - "total_tokens": 203, + "total_tokens": 205, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/3a81146f2afa.json b/tests/integration/recordings/responses/3a81146f2afa.json index 5218693f3..be64d280c 100644 --- a/tests/integration/recordings/responses/3a81146f2afa.json +++ b/tests/integration/recordings/responses/3a81146f2afa.json @@ -18,16 +18,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": "Blue" + "text": "Purple" } ], - "created": 1759351454, + "created": 1759376595, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -37,7 +37,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -46,7 +46,7 @@ "text": ".\n\n" } ], - "created": 1759351454, + "created": 1759376595, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -56,16 +56,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": "I" + "text": "My" } ], - "created": 1759351455, + "created": 1759376595, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -75,16 +75,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " completed" + "text": " answer" } ], - "created": 1759351455, + "created": 1759376595, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -94,197 +94,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " the" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " sentence" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " with" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " the" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - 
"system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " word" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " \"" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "blue" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "\"" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " because" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " it" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -293,7 +103,7 @@ "text": " is" } ], - "created": 1759351455, + "created": 1759376595, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -303,330 +113,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " a" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " well" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "-known" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - 
"system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " and" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " classic" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " rh" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "ym" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "ing" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " completion" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " of" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " the" - } - ], - "created": 1759351455, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " original" - } - ], - "created": 1759351456, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " phrase" - } - ], - "created": 1759351456, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - 
"system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " from" - } - ], - "created": 1759351456, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " the" - } - ], - "created": 1759351456, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " nursery" - } - ], - "created": 1759351456, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " rhyme" - } - ], - "created": 1759351456, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -635,7 +122,7 @@ "text": " \"" } ], - "created": 1759351456, + "created": 1759376595, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -645,7 +132,368 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "purple" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "\"" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " because" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " it" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "'s" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " a" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " well" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "-known" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " completion" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " of" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " the" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " classic" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " English" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " poem" + } + ], + "created": 1759376595, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "'s" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " rhyme" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " scheme" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": ":" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " \"" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -654,7 +502,7 @@ "text": "R" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -664,7 +512,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -673,7 +521,7 @@ "text": "oses" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -683,7 +531,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -692,7 +540,7 @@ "text": " are" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -702,16 +550,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " Red" + "text": " red" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -721,7 +569,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -730,7 +578,7 @@ "text": "," } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -740,16 +588,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " V" + "text": " v" } ], - "created": 1759351456, + "created": 1759376596, 
"model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -759,7 +607,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -768,7 +616,7 @@ "text": "io" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -778,7 +626,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -787,7 +635,7 @@ "text": "lets" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -797,7 +645,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -806,7 +654,7 @@ "text": " are" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -816,16 +664,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " Blue" + "text": " purple" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -835,16 +683,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": "\"." + "text": ".\"" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -854,16 +702,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " This" + "text": " The" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -873,26 +721,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " completed" - } - ], - "created": 1759351456, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, @@ -901,7 +730,7 @@ "text": " phrase" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -911,16 +740,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " typically" + "text": " has" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -930,16 +759,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + 
"id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " goes" + "text": " become" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -949,16 +778,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " like" + "text": " a" } ], - "created": 1759351456, + "created": 1759376596, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -968,7 +797,178 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-591", + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " cultural" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " reference" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " point" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " and" + } + ], + "created": 1759376596, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " is" + } + ], + "created": 1759376597, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " often" + } + ], + "created": 1759376597, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " used" + } + ], + "created": 1759376597, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " to" + } + ], + "created": 1759376597, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + 
"__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " fill" + } + ], + "created": 1759376597, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-356", "choices": [ { "finish_reason": "length", @@ -977,7 +977,7 @@ "text": "" } ], - "created": 1759351456, + "created": 1759376597, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", diff --git a/tests/integration/recordings/responses/3ca695048bee.json b/tests/integration/recordings/responses/3ca695048bee.json index 77e2a77ec..9143c46da 100644 --- a/tests/integration/recordings/responses/3ca695048bee.json +++ b/tests/integration/recordings/responses/3ca695048bee.json @@ -39,7 +39,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-220", + "id": "chatcmpl-271", "choices": [ { "delta": { @@ -50,7 +50,7 @@ "tool_calls": [ { "index": 0, - "id": "call_qfe4vie8", + "id": "call_hvk3qq8o", "function": { "arguments": "{\"city\":\"Tokyo\"}", "name": "get_weather" @@ -64,7 +64,7 @@ "logprobs": null } ], - "created": 1759351469, + "created": 1759376635, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -75,7 +75,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-220", + "id": "chatcmpl-271", "choices": [ { "delta": { @@ -90,7 +90,7 @@ "logprobs": null } ], - "created": 1759351469, + "created": 1759376635, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/3f5871e0805d.json b/tests/integration/recordings/responses/3f5871e0805d.json new file mode 100644 index 000000000..4c79ce460 --- /dev/null +++ b/tests/integration/recordings/responses/3f5871e0805d.json @@ -0,0 +1,85 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "Process this data" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "process_data", + "description": "Process structured data", + "parameters": { + "type": "object", + "properties": { + "data": { + "$ref": "#/$defs/DataObject" + } + }, + "$defs": { + "DataObject": { + "type": "object", + "properties": { + "values": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + } + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-798", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "{\"name\":\"process_data\",\"parameters\":{\"data\":[{\"values\":[2,3]}]\"}}", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1759376608, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 20, + "prompt_tokens": 176, 
+ "total_tokens": 196, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/4283d7199d9b.json b/tests/integration/recordings/responses/4283d7199d9b.json new file mode 100644 index 000000000..c09104a8c --- /dev/null +++ b/tests/integration/recordings/responses/4283d7199d9b.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.080011Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.126544Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.169848Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": 
null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.21147Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.254674Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.29727Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.338937Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.380865Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.422627Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.463935Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.505674Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": 
"ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.547072Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.588461Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.629627Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.67101Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.713398Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.757208Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.800572Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.843458Z", + "done": true, + "done_reason": "stop", + "total_duration": 1585956083, + "load_duration": 162121750, + "prompt_eval_count": 361, + "prompt_eval_duration": 657951625, + "eval_count": 19, + "eval_duration": 765105333, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/4ebcaf6c2aee.json b/tests/integration/recordings/responses/4ebcaf6c2aee.json index 
41dc9ab1a..f57994797 100644 --- a/tests/integration/recordings/responses/4ebcaf6c2aee.json +++ b/tests/integration/recordings/responses/4ebcaf6c2aee.json @@ -19,22 +19,390 @@ "data": [ { "embedding": [ - 0.253706, - 0.016367152, - -0.29664654, - 0.31654558, - -0.18624601, - 0.07602756, - -0.031531323, - 0.2986085, - -0.49672848, - -0.36617878, - 0.25328273, - -0.33349335, - 0.0060151755, - 0.14081024, - -0.13757885, - -0.14679416 + 0.04635219, + 0.002988263, + -0.054220885, + 0.057812735, + -0.0340614, + 0.013923248, + -0.005755826, + 0.054555666, + -0.09073176, + -0.066910096, + 0.046287432, + -0.060912322, + 0.0010950539, + 0.025724398, + -0.025169374, + -0.026821515, + -0.030190151, + 0.0019341545, + -0.0754819, + 0.057380512, + 0.020332545, + -0.005591279, + -0.0022273492, + 0.012063173, + -0.011033521, + -0.03300947, + 0.05462081, + 0.014426073, + 0.024025004, + 0.004224287, + 0.09837723, + 0.08385713, + -0.049175426, + 0.03877149, + 0.08748876, + -0.0223024, + 0.006552746, + -0.0070359865, + 0.017893821, + 0.015465863, + 0.05007282, + -0.019349905, + 0.064887345, + 0.03184605, + 0.0034936152, + 0.02317752, + -0.06297051, + 0.044468515, + -0.022246253, + -0.017976552, + 0.040390052, + -0.0020998395, + -0.05173264, + 0.014722753, + 0.01640469, + -0.06438627, + -0.043313596, + -0.040564552, + 0.044412937, + -0.0031199565, + -0.007237415, + -0.05158015, + 0.059660934, + -0.014839656, + 0.012902056, + 0.028181136, + -0.019578207, + -0.0664231, + -0.06333673, + 0.028995825, + -0.114707075, + 0.041575413, + -0.022128351, + 0.01979776, + 0.0630018, + 0.011822141, + -0.06492722, + -0.066328146, + 0.021114407, + -0.020638306, + -0.009599678, + 0.013701863, + -0.060742326, + 0.005395315, + 0.026589092, + 0.11719033, + 0.067120634, + 0.008300158, + 0.036319703, + 0.00772981, + 0.071582936, + 0.019818509, + -0.15945566, + 0.047943458, + 0.00031571978, + -0.04666597, + 0.007148715, + -0.08839544, + 0.038042437, + 0.06620088, + 0.034336157, + -0.035366412, + 0.041598067, + 0.073756054, + -0.018818064, + -0.017260034, + 0.058635473, + -0.01371376, + 0.048319146, + -0.023727186, + 0.024134034, + 0.015763162, + 0.06681245, + 0.01748244, + 0.0825409, + -0.044568237, + 0.0015441044, + -0.011225885, + 0.0153481, + -0.061364066, + 0.05792184, + 0.044216745, + -0.047036964, + -0.02634555, + -0.033504363, + 0.06713578, + 0.030866034, + 2.024336e-34, + -0.03532978, + 0.021929236, + 0.030160688, + 0.09271786, + -0.010355268, + 0.07196569, + 0.052604284, + 0.085753724, + 0.094942175, + 0.053786535, + -0.08900509, + -0.024382822, + -0.008744401, + -0.03167582, + 0.01025236, + 0.1818434, + -0.0022662894, + 0.118558116, + -0.072208576, + -0.005867667, + 0.0746222, + -0.024001855, + -0.013938801, + -0.030681474, + -0.029207803, + -0.117624186, + -0.046466038, + -0.002622228, + -0.0902171, + -0.038626853, + -0.037497964, + -0.02418436, + -0.069297835, + 0.06424038, + 0.0045628003, + -0.0041498984, + -0.01649947, + 0.051125433, + -0.0058985935, + -0.0122523345, + -0.047424458, + -0.007806876, + 0.07906618, + 0.03244041, + -0.044682544, + -0.022625683, + 0.028852794, + -0.050480433, + 0.043801326, + -0.023512814, + -0.029832385, + 0.031089257, + 0.07129686, + -0.089649536, + 0.011963804, + -0.018448317, + 0.019637493, + 0.020081993, + 0.0012980831, + 0.093201645, + -0.064436235, + -0.040581323, + -0.01193043, + 0.043884862, + -0.010675756, + -0.030739127, + 0.005605308, + -0.110498495, + 0.044510514, + 0.037110664, + 0.04116233, + -0.039460793, + -0.04470639, + -0.027589805, + -0.02073358, + -0.067221105, + 
0.050390884, + 0.031397663, + -0.008031462, + -0.009285899, + 0.0013141648, + -0.017254544, + 0.010367782, + -0.05940024, + -0.018042587, + -0.15487815, + 0.0069424273, + -0.05208202, + 0.0014201442, + -0.13956298, + -0.040203292, + 0.027910054, + -0.064872995, + -0.016270144, + 0.07052549, + 5.3188943e-34, + 0.012666737, + 0.016728623, + -0.013163009, + 0.06391275, + -0.043404065, + 0.015435096, + 0.03720438, + 0.05997576, + -0.07789181, + -0.0408386, + 0.024137221, + -0.019834999, + -0.034739267, + 0.00042199617, + 0.048484907, + 0.08716056, + -0.101133205, + -0.07535088, + -0.03912376, + -0.031597532, + -0.052266575, + 0.022085808, + -0.011040282, + 0.005077135, + -0.088432744, + -0.010477913, + 0.047780182, + -0.073345095, + 0.014382301, + 0.038075384, + 0.02176859, + -0.029071847, + -0.036925532, + 0.14317243, + 0.020646103, + -0.08367964, + 0.111576855, + -0.009943396, + 0.023071144, + 0.0926832, + 0.011242715, + 0.068017475, + -0.007714686, + 0.03060742, + -0.011360289, + 0.109015204, + 0.12930514, + -0.07566831, + 0.09001269, + -0.0090979, + 0.0148039665, + 0.048663232, + 0.08894293, + 0.038565516, + 0.005821986, + 0.016084671, + -0.106283545, + -0.033372246, + 0.05440088, + -0.005663873, + 0.0011572369, + -0.024969472, + 0.043092247, + -0.009314855, + -0.11836073, + -0.027310666, + 0.009811885, + -0.0052975323, + -0.044883158, + 0.066436425, + -0.06750139, + -0.02696421, + 0.01402391, + -0.04950559, + -0.084093384, + -0.07380851, + 0.04709705, + 4.9404687e-05, + 0.01672617, + 0.01849747, + 0.027683195, + 0.0047972985, + 0.0017495222, + 0.07066204, + -0.022430636, + 0.06875498, + 0.093927115, + 0.11101308, + -0.015589739, + 0.021178465, + 0.033638563, + 0.034676168, + -0.026882911, + -0.010514364, + 0.0073013064, + -1.2070348e-08, + -0.10034882, + -0.028641108, + -0.061462097, + -0.009792086, + -0.081652306, + -0.011814046, + 0.002039501, + 0.010384326, + 0.01639641, + 0.09542911, + 0.012538498, + -0.03542602, + 0.018125113, + 0.062750235, + 0.0007333235, + -0.13612862, + -0.049830034, + 0.021177148, + 0.006589976, + 0.007859552, + -0.03270378, + 0.024738451, + -0.02542262, + -0.0033008803, + 0.030640591, + -0.032442387, + 0.04598555, + 0.03903257, + 0.035755396, + 0.01686084, + 0.13498692, + 0.028296864, + -0.0035224769, + -0.036735818, + -0.046355885, + 0.057701495, + 0.008000554, + 0.047822826, + 0.04911064, + 0.035214324, + -0.09817153, + 0.0050856513, + -0.018094635, + -0.04385158, + 0.06649695, + -0.037648164, + -0.006218895, + -0.037976924, + -0.0036204353, + -0.03149386, + 0.031777944, + -0.011333557, + 0.009081317, + 0.022486951, + 0.032106593, + 0.023041077, + -0.06739943, + 0.06294171, + -0.057333894, + -0.041295, + 0.060841344, + 0.03247397, + -0.05132725, + -0.04992364 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/55ae40168378.json b/tests/integration/recordings/responses/55ae40168378.json new file mode 100644 index 000000000..8d8407727 --- /dev/null +++ b/tests/integration/recordings/responses/55ae40168378.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. 
You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use one of the provided functions/tools to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.216374Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.257898Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.299052Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.340155Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.381269Z", 
+ "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.422347Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.463428Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.504785Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.548668Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.589697Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.631027Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.672172Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.713652Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, 
+ "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.755751Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.796948Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.838368Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.879363Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.920412Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.961636Z", + "done": true, + "done_reason": "stop", + "total_duration": 983443875, + "load_duration": 129661959, + "prompt_eval_count": 377, + "prompt_eval_duration": 107132333, + "eval_count": 19, + "eval_duration": 745847667, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/63aa4590a38a.json b/tests/integration/recordings/responses/63aa4590a38a.json index 9e3b275db..ae20dce36 100644 --- a/tests/integration/recordings/responses/63aa4590a38a.json +++ b/tests/integration/recordings/responses/63aa4590a38a.json @@ -19,390 +19,390 @@ "data": [ { "embedding": [ - 0.043770123, - 0.021501394, - -0.081300564, - 0.010615138, - -0.07908651, - -0.03219175, - 0.13090447, - 0.042329222, - -0.11600146, - -0.07588096, - 0.041826088, - -0.080617175, - 0.038125783, - -0.01069657, - 0.01577377, - -0.04196888, - 0.043099895, - -0.033355612, - 0.013571747, - -0.0103924, - 0.015561896, - -0.03786113, - -0.050319925, - -0.02566629, - -0.047868017, - -0.08717805, - 0.01685358, - -0.03676223, - 0.0063788705, - 0.020863743, - 0.11264443, - -0.0021451844, - -0.07911777, - 0.038758967, - 0.115321144, - -0.019753717, 
- 0.0067159277, - -0.02115779, - -0.0144774495, - -0.0027154125, - -0.034384295, - -0.052576542, - -0.030578543, - 0.04745372, - -0.024294367, - 0.01091144, - -0.03947583, - 0.07183755, - -0.020715859, - 0.018965777, - 0.04292474, - -0.007755194, - 0.0025708016, - -0.058263537, - 0.0117485095, - -0.022703577, - 0.001755438, - -0.012628832, - 0.030728007, - 0.017719304, - -0.061525322, - -0.036568273, - 0.025831668, - 0.025376469, - 0.012137967, - 0.009102949, - -0.027313529, - -0.093379095, - 0.0052120173, - 0.0074658697, - -0.07538, - 0.010161349, - -0.028439516, - 0.03026334, - 0.0036700817, - -0.022599109, - -0.037862476, - -0.08384314, - -0.0124443015, - -0.048889726, - 0.029131662, - -0.044443335, - -0.07518736, - -0.020938978, - 0.063386515, - 0.16294138, - 0.060580015, - -0.01281573, - -0.031040885, - 0.018372353, - 0.11225789, - 0.072922915, - -0.06272038, - -0.031792488, - -0.017476005, - 0.04846264, - -0.04116229, - -0.041834168, - -0.059919056, - 0.15907861, - -0.027786179, - -0.012492541, - 0.05599519, - -0.019895995, - 0.022076221, - 0.006363836, - 0.046413723, - -0.0731325, - 0.03326452, - 0.059475966, - -0.033314705, - 0.030761855, - 0.00819013, - -0.020254606, - 0.05658313, - -0.08153619, - 0.023402533, - 0.0060753864, - -0.07993489, - 0.013990512, - 0.052254565, - 0.027170746, - -0.049271967, - 0.02814688, - 0.019500777, - 0.054206643, - 0.082691684, - -1.8817448e-33, - 0.013630832, - -0.010863344, - 0.015899567, - 0.06938339, - -0.05113185, - 0.08995833, - 0.04450505, - 0.08101549, - 0.018903807, - -0.020960161, - -0.017933648, - -0.02174221, - 0.010988686, - 0.015100026, - 0.017031211, - 0.09433042, - 0.003454907, - 0.010199729, - -0.0446973, - 0.0018167854, - 0.015817188, - -0.06576281, - -0.004943305, - 0.004393494, - -0.019598262, - -0.092797264, - -0.025917865, - 0.04409669, - 0.054165967, - -0.007365383, - -0.021470547, - -0.03683317, - -0.091507494, - 0.08402351, - -0.01809901, - 0.0038072586, - 0.020236026, - 0.0439697, - -0.077322714, - 0.0057473024, - -0.054513566, - -0.024854423, - 0.075270385, - 0.034554463, - -0.08118007, - -0.12208905, - -0.0052893, - 0.0078005046, - 0.05028763, - 0.015558154, - -0.056349996, - 0.0398076, - 0.012997719, - -0.040145177, - 0.014409028, - -0.033200737, - -0.008437484, - -0.037582297, - -0.019651853, - 0.017285295, - -0.008976723, - -0.0018494898, - -0.0030671947, - 0.03046138, - -0.051143825, - -0.08688155, - -0.018344227, - -0.113307714, - 0.073259674, - 0.04602224, - 0.012651309, - -0.063435435, - -0.028471926, - 0.020155901, - -0.078830436, - -0.00069818215, - -0.03156303, - 0.123062745, - 0.0042949035, - -0.026413191, - 0.07838535, - -0.07747411, - -0.02126005, - 0.048919026, - 0.02919413, - -0.009296978, - -0.030687347, - -0.041037664, - -0.038565576, - -0.08043238, - 0.023225678, - 0.041928973, - -0.05812511, - 0.058555346, - 0.07633673, - 4.4510456e-34, - -0.019582625, - 0.040237214, - 0.01455587, - 0.034353998, - 0.043911777, - -0.023234777, - 0.0677493, - -0.030089214, - -0.09076478, - -0.019257858, - -0.02767876, - -0.00065146026, - 0.0043030144, - 0.05363546, - 0.04073387, - 0.03255476, - -0.10712685, - -0.050083157, - -0.016644027, - -0.0077649173, - -0.11153465, - 0.07478277, - -0.015999233, - -0.050547555, - -0.113217294, - -0.006174145, - 0.050873067, - -0.030284155, - 0.04314861, - 0.033020362, - 0.023671353, - 0.04654029, - -0.03415647, - 0.03614603, - 0.023047049, - -0.02677317, - 0.063607745, - 0.09978129, - 0.03527302, - 0.15538219, - 0.08349002, - 0.10931568, - 0.04684532, - -0.010147538, - -0.03256112, - 
0.12924333, - 0.031221064, - -0.099673584, - 0.010860566, - 0.02326085, - -0.011916549, - 0.010135849, - 0.06884636, - 0.009350001, - -0.0226591, - -0.04280281, - -0.04821317, - -0.08508304, - 0.051028382, - 0.045148462, - -0.03566162, - 0.06547104, - 0.048883036, - 0.03793435, - -0.1407055, - -0.06711337, - 0.009881868, - -0.0049659596, - -0.044289522, - 0.0039236215, - -0.02692826, - -0.066134326, - 0.04076233, - -0.05222117, - 0.060488354, - -0.04113724, - -0.04314174, - -0.025147837, - 0.085597694, - -0.044939328, - 0.06395307, - -0.024218159, - -0.050523587, - -0.0020718095, - -0.07894165, - 0.0026805927, - 0.020709056, - 0.1026727, - -0.012374822, - 0.056179732, - 0.06552235, - 0.030915475, - -0.077197015, - -0.061245024, - -0.016111895, - -1.3512232e-08, - -0.05040501, - -0.033646606, - 0.04670903, - 0.047397695, - -0.044165645, - 0.046301767, - -0.006073457, - -0.053902794, - 0.013089125, - 0.050438043, - -0.009894958, - -0.0041677835, - 0.0723306, - 0.021069802, - 0.02670403, - -0.074845195, - -0.026750853, - 0.052738186, - -0.03469103, - 0.039813705, - -0.01640883, - 0.045899663, - -0.0224731, - 0.02387658, - 0.049145795, - 0.09110705, - -0.0025007618, - 0.04937552, - -0.03864697, - 0.020868128, - 0.07605537, - 0.08488945, - -0.05197299, - -0.06879239, - -0.06136516, - 0.077237174, - -0.06451729, - 0.04453416, - 0.008209786, - 0.015886698, - -0.04280691, - 0.005315579, - 0.0034463098, - 0.0031776188, - -0.013040836, - -0.091359615, - 0.0642767, - -0.054965723, - 0.0007161393, - -0.06260912, - -0.03496602, - -0.029944083, - 0.04422821, - 0.017855663, - -0.027972128, - -0.03656317, - 0.02111413, - 0.060607255, - -0.031320468, - -0.014338154, - 0.034649797, - 0.052279983, - -0.036579564, - 0.028179456 + 0.043779343, + 0.021533398, + -0.081306435, + 0.010584965, + -0.079082854, + -0.03219143, + 0.13092613, + 0.04234389, + -0.11600539, + -0.07588513, + 0.04182356, + -0.08061255, + 0.038127176, + -0.010701234, + 0.015768763, + -0.04193689, + 0.04310592, + -0.033361685, + 0.013566423, + -0.010392366, + 0.015551022, + -0.037858423, + -0.050305344, + -0.025666261, + -0.047879875, + -0.087179765, + 0.016856788, + -0.036765736, + 0.006393739, + 0.020844297, + 0.11262393, + -0.002143682, + -0.07910913, + 0.038748607, + 0.11532516, + -0.019759571, + 0.0066967797, + -0.021164352, + -0.014471563, + -0.0027048697, + -0.034388524, + -0.052571636, + -0.030607725, + 0.04747725, + -0.02431059, + 0.0109337615, + -0.03946421, + 0.071846664, + -0.020690937, + 0.01898796, + 0.042931512, + -0.0077551426, + 0.0025911122, + -0.058268107, + 0.0117475465, + -0.022701943, + 0.0017815019, + -0.012612941, + 0.030724185, + 0.017728312, + -0.06155491, + -0.03656162, + 0.02583153, + 0.02537894, + 0.012139213, + 0.009105951, + -0.027318193, + -0.093389414, + 0.005184693, + 0.007488449, + -0.07540277, + 0.010159999, + -0.028444426, + 0.030260745, + 0.0036438918, + -0.022627153, + -0.037846327, + -0.08381657, + -0.012445195, + -0.048908208, + 0.029149827, + -0.044437535, + -0.07520237, + -0.020924438, + 0.06342514, + 0.1629199, + 0.060563333, + -0.012817673, + -0.031030292, + 0.018368995, + 0.11223112, + 0.07292473, + -0.062686674, + -0.031803295, + -0.017489262, + 0.048433464, + -0.041148387, + -0.04183779, + -0.05994369, + 0.15909556, + -0.027785666, + -0.012455991, + 0.056005318, + -0.019891974, + 0.022063067, + 0.006342065, + 0.0464118, + -0.07311654, + 0.033282198, + 0.05949105, + -0.033307947, + 0.030738499, + 0.008186239, + -0.020268966, + 0.056593496, + -0.081526734, + 0.023390312, + 0.0060836566, + 
-0.07992586, + 0.013986445, + 0.052250065, + 0.027186505, + -0.049284942, + 0.028148174, + 0.019493744, + 0.05418436, + 0.0827222, + -1.8825437e-33, + 0.01360945, + -0.010870715, + 0.015887791, + 0.069373555, + -0.051129147, + 0.08999179, + 0.044494778, + 0.08100757, + 0.018944906, + -0.020974122, + -0.017938385, + -0.021756735, + 0.010972489, + 0.015099965, + 0.017018452, + 0.094338946, + 0.0034407445, + 0.010244923, + -0.044709302, + 0.0018059182, + 0.015817573, + -0.065777056, + -0.004948138, + 0.0044092103, + -0.019589791, + -0.092789896, + -0.025898295, + 0.044104066, + 0.0541385, + -0.007362511, + -0.021487307, + -0.036836285, + -0.09148704, + 0.084001675, + -0.018094191, + 0.003797567, + 0.020257449, + 0.04394643, + -0.0772898, + 0.0057312953, + -0.054519102, + -0.024835315, + 0.0753162, + 0.034552757, + -0.081203006, + -0.12210961, + -0.0053012627, + 0.00780717, + 0.050265096, + 0.015569535, + -0.056362487, + 0.039800324, + 0.013022089, + -0.04015537, + 0.014401654, + -0.033209093, + -0.008451782, + -0.037590392, + -0.01965779, + 0.01730637, + -0.00896531, + -0.0018413392, + -0.0030382746, + 0.030460354, + -0.05112036, + -0.086875, + -0.018338922, + -0.11328767, + 0.07325826, + 0.046035297, + 0.012633494, + -0.06343216, + -0.028439038, + 0.020128354, + -0.07883383, + -0.00069870794, + -0.03155447, + 0.12306934, + 0.004300722, + -0.026421167, + 0.078361824, + -0.077461444, + -0.021267027, + 0.048929654, + 0.02919381, + -0.0092880055, + -0.030666346, + -0.04102384, + -0.03860138, + -0.08042292, + 0.023227168, + 0.04191858, + -0.058156747, + 0.0585743, + 0.076342255, + 4.465569e-34, + -0.019599343, + 0.040230304, + 0.01455632, + 0.034345042, + 0.04392999, + -0.023241352, + 0.067749046, + -0.03010354, + -0.09075954, + -0.019227842, + -0.027724287, + -0.00062344945, + 0.0042892746, + 0.053643614, + 0.04075099, + 0.032581333, + -0.107116826, + -0.0500636, + -0.016655827, + -0.007782394, + -0.111523, + 0.07476429, + -0.016019335, + -0.050536986, + -0.11320647, + -0.0061384854, + 0.050886273, + -0.030283457, + 0.04318923, + 0.03301474, + 0.02362771, + 0.046507858, + -0.03416386, + 0.036145207, + 0.023037339, + -0.026803765, + 0.06361122, + 0.09975251, + 0.035269737, + 0.1554014, + 0.083479255, + 0.10931981, + 0.046847064, + -0.010136355, + -0.032541983, + 0.12926093, + 0.031193413, + -0.09971323, + 0.010830718, + 0.02325219, + -0.011917061, + 0.010155018, + 0.06883269, + 0.009340846, + -0.022698723, + -0.042815465, + -0.048211087, + -0.085067384, + 0.05105234, + 0.045155898, + -0.03564869, + 0.06549556, + 0.048875004, + 0.037915554, + -0.14071068, + -0.067095764, + 0.009898252, + -0.0049653547, + -0.044304688, + 0.0039006064, + -0.026903173, + -0.066124685, + 0.040738244, + -0.052228633, + 0.060485654, + -0.041119356, + -0.04312945, + -0.025152665, + 0.08556276, + -0.044942576, + 0.06393979, + -0.024227533, + -0.05052092, + -0.0020624825, + -0.078943975, + 0.0026753, + 0.02068896, + 0.102683865, + -0.01237572, + 0.056172684, + 0.06552171, + 0.030940128, + -0.07721113, + -0.061241012, + -0.016143149, + -1.3511957e-08, + -0.050416306, + -0.033628013, + 0.046722032, + 0.04744138, + -0.04411888, + 0.04631675, + -0.0060847937, + -0.053873356, + 0.013075445, + 0.050437532, + -0.009895477, + -0.0041795173, + 0.07229928, + 0.021081135, + 0.02672776, + -0.07482113, + -0.026757998, + 0.052755926, + -0.034690056, + 0.039811596, + -0.016370349, + 0.045900222, + -0.02250936, + 0.023861, + 0.04912799, + 0.09111738, + -0.0024878879, + 0.049395334, + -0.03861115, + 0.020867983, + 0.076049894, + 
0.084881924, + -0.051956687, + -0.06878504, + -0.061384037, + 0.077220954, + -0.06454818, + 0.044513144, + 0.008181126, + 0.015890416, + -0.04280811, + 0.005317184, + 0.0034429359, + 0.0031937633, + -0.013058055, + -0.09134677, + 0.06425565, + -0.054977305, + 0.0007087448, + -0.06258866, + -0.034974415, + -0.029966963, + 0.044276785, + 0.017868131, + -0.027976807, + -0.036579583, + 0.021142753, + 0.06057356, + -0.03133335, + -0.014331035, + 0.034653842, + 0.052315667, + -0.036585484, + 0.028209662 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/6412295819a1.json b/tests/integration/recordings/responses/6412295819a1.json index 5d825494f..5494079e8 100644 --- a/tests/integration/recordings/responses/6412295819a1.json +++ b/tests/integration/recordings/responses/6412295819a1.json @@ -16,23 +16,23 @@ "body": { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-602", + "id": "cmpl-39", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, - "text": "Blue.\n\nMy response is \"blue\" because it completes a classic English nursery rhyme:\n\nRoses are red,\nViolets are blue,\nSugar is sweet,\nAnd so are you.\n\nIn this context, the traditional completion of the sentence uses the word \"blue\". However, if I were to choose my own alternative completion, I might opt for a whimsical or unconventional option." + "text": "Blue.\n\nMy answer is \"blue\" because it completes the traditional rhyme scheme of the classic English poem \"Roses are Red, Violets are Blue\", which goes like this:\n\nRoses are red,\nViolets are blue,\nSugar is sweet,\nAnd so are you.\n\nIn this poem, the third line typically follows the same rhyme as the first line. In order to maintain this rhyming scheme, adding \"Blue\" after \"violets\" in the original sentence makes sense linguistically and structurally, creating a natural completion of the poetic phrase." } ], - "created": 1759351454, + "created": 1759376594, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 77, + "completion_tokens": 115, "prompt_tokens": 50, - "total_tokens": 127, + "total_tokens": 165, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/65c12de0a1db.json b/tests/integration/recordings/responses/65c12de0a1db.json index e1c0fb8fc..31f88271d 100644 --- a/tests/integration/recordings/responses/65c12de0a1db.json +++ b/tests/integration/recordings/responses/65c12de0a1db.json @@ -24,14 +24,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-123", + "id": "chatcmpl-528", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "Hello! As of my knowledge cutoff on December 15th, I have the latest information for you. However, please note that my data may not be entirely up-to-date.\n\nCurrently, and based on historical climate patterns, it appears to be a partly cloudy day with mild temperatures in San Francisco, CA. Expect a temperature range of around 48\u00b0F (9\u00b0C) to 54\u00b0F (12\u00b0C). 
It's likely to be a breezy day, with winds blowing at about 13 mph (21 km/h).\n\nHowever, if I were to look into more recent weather patterns or forecasts, I would recommend checking the latest conditions directly from reliable sources such as the National Weather Service or local news outlets for more accurate and up-to-date information.\n\nPlease let me know how I can further assist you.", + "content": "I can give you a general idea of the typical weather conditions in San Francisco during this time.\n\nUnfortunately, I'm not aware of your current location or date. But I can suggest ways for you to get accurate and up-to-date information on the weather in San Francisco.\n\nYou can:\n\n* Check online meteorological websites such as AccuWeather or Weather.com for current conditions and forecasts.\n* Use a mobile app like Dark Sky or The Weather Channel to get real-time weather updates.\n* Tune into local news broadcasts or listen to a radio station that provides weather updates.\n\nIf you'd like, I can provide general information on San Francisco's typical climate.", "refusal": null, "role": "assistant", "annotations": null, @@ -41,15 +41,15 @@ } } ], - "created": 1758978071, + "created": 1759376616, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 163, + "completion_tokens": 131, "prompt_tokens": 45, - "total_tokens": 208, + "total_tokens": 176, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/6b3e593ad9b8.json b/tests/integration/recordings/responses/6b3e593ad9b8.json index 0165009cb..62e6d2ca2 100644 --- a/tests/integration/recordings/responses/6b3e593ad9b8.json +++ b/tests/integration/recordings/responses/6b3e593ad9b8.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-738", + "id": "chatcmpl-396", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245079, + "created": 1759373698, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/6f96090aa955.json b/tests/integration/recordings/responses/6f96090aa955.json index 375a2ac70..46a6e21db 100644 --- a/tests/integration/recordings/responses/6f96090aa955.json +++ b/tests/integration/recordings/responses/6f96090aa955.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,7 +73,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", 
"service_tier": null, @@ -99,7 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -114,7 +114,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -125,7 +125,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -140,7 +140,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -151,7 +151,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -166,7 +166,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -177,7 +177,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -192,7 +192,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -203,7 +203,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -218,7 +218,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -229,7 +229,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -244,7 +244,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -255,7 +255,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -270,7 +270,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -281,7 +281,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -296,7 +296,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -307,7 +307,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -322,7 +322,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -333,7 +333,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { 
@@ -348,7 +348,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -359,7 +359,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -374,7 +374,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -385,7 +385,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -400,7 +400,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -411,7 +411,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -426,7 +426,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -437,7 +437,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -452,7 +452,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -463,7 +463,33 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", + "choices": [ + { + "delta": { + "content": ",", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759376634, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -478,7 +504,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -489,7 +515,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -504,7 +530,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -515,7 +541,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -530,7 +556,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -541,7 +567,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -556,7 +582,7 @@ "logprobs": null } ], - "created": 
1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -567,7 +593,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -582,7 +608,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -593,7 +619,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -608,7 +634,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -619,7 +645,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -634,7 +660,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -645,7 +671,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-177", + "id": "chatcmpl-468", "choices": [ { "delta": { @@ -660,7 +686,7 @@ "logprobs": null } ], - "created": 1759351467, + "created": 1759376634, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/7a047bcf8b19.json b/tests/integration/recordings/responses/7a047bcf8b19.json index 4f9c8b06e..a0588bd3c 100644 --- a/tests/integration/recordings/responses/7a047bcf8b19.json +++ b/tests/integration/recordings/responses/7a047bcf8b19.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-236", + "id": "chatcmpl-790", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759247859, + "created": 1759373710, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/80e4404d8987.json b/tests/integration/recordings/responses/80e4404d8987.json index 7eabfc363..226b6648d 100644 --- a/tests/integration/recordings/responses/80e4404d8987.json +++ b/tests/integration/recordings/responses/80e4404d8987.json @@ -22,7 +22,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.708948Z", + "created_at": "2025-10-02T02:54:51.50254Z", "done": false, "done_reason": null, "total_duration": null, @@ -40,7 +40,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.749031Z", + "created_at": "2025-10-02T02:54:51.549521Z", "done": false, "done_reason": null, "total_duration": null, @@ -58,7 +58,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.790192Z", + "created_at": "2025-10-02T02:54:51.594384Z", "done": false, "done_reason": null, "total_duration": null, @@ -76,7 +76,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.831093Z", + "created_at": 
"2025-10-02T02:54:51.637769Z", "done": false, "done_reason": null, "total_duration": null, @@ -94,7 +94,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.873135Z", + "created_at": "2025-10-02T02:54:51.684099Z", "done": false, "done_reason": null, "total_duration": null, @@ -112,7 +112,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.91375Z", + "created_at": "2025-10-02T02:54:51.730912Z", "done": false, "done_reason": null, "total_duration": null, @@ -130,7 +130,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.95439Z", + "created_at": "2025-10-02T02:54:51.777299Z", "done": false, "done_reason": null, "total_duration": null, @@ -148,7 +148,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:46.995224Z", + "created_at": "2025-10-02T02:54:51.823309Z", "done": false, "done_reason": null, "total_duration": null, @@ -166,7 +166,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:47.035887Z", + "created_at": "2025-10-02T02:54:51.868924Z", "done": false, "done_reason": null, "total_duration": null, @@ -184,15 +184,15 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-09-03T17:37:47.076806Z", + "created_at": "2025-10-02T02:54:51.915105Z", "done": true, "done_reason": "stop", - "total_duration": 2069654958, - "load_duration": 177579833, + "total_duration": 5098012833, + "load_duration": 4289621791, "prompt_eval_count": 31, - "prompt_eval_duration": 1521851250, + "prompt_eval_duration": 393000541, "eval_count": 10, - "eval_duration": 369478042, + "eval_duration": 414080875, "response": "", "thinking": null, "context": null diff --git a/tests/integration/recordings/responses/8486e5b1c6db.json b/tests/integration/recordings/responses/8486e5b1c6db.json new file mode 100644 index 000000000..6eae12ff0 --- /dev/null +++ b/tests/integration/recordings/responses/8486e5b1c6db.json @@ -0,0 +1,276 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point_with_metadata\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point_with_metadata(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.185623Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.227358Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.268854Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.311161Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.353205Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null 
+ } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.394667Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.43604Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.477482Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " in", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.519193Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Celsius", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.561068Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.602574Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.644332Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.686134Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.727722Z", + "done": 
true, + "done_reason": "stop", + "total_duration": 730418375, + "load_duration": 118920875, + "prompt_eval_count": 401, + "prompt_eval_duration": 67995917, + "eval_count": 14, + "eval_duration": 542856417, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/84fc473e7b29.json b/tests/integration/recordings/responses/84fc473e7b29.json index a4b228f05..3de1076be 100644 --- a/tests/integration/recordings/responses/84fc473e7b29.json +++ b/tests/integration/recordings/responses/84fc473e7b29.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-344", + "id": "chatcmpl-746", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759247858, + "created": 1759373709, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/87577729d812.json b/tests/integration/recordings/responses/87577729d812.json index 7c268aa2e..8b4e8fab8 100644 --- a/tests/integration/recordings/responses/87577729d812.json +++ b/tests/integration/recordings/responses/87577729d812.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-119", + "id": "chatcmpl-386", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245069, + "created": 1759373686, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/931ac7158789.json b/tests/integration/recordings/responses/931ac7158789.json index dc5f985da..6041755e9 100644 --- a/tests/integration/recordings/responses/931ac7158789.json +++ b/tests/integration/recordings/responses/931ac7158789.json @@ -40,7 +40,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-190", + "id": "chatcmpl-854", "choices": [ { "finish_reason": "tool_calls", @@ -55,7 +55,7 @@ "function_call": null, "tool_calls": [ { - "id": "call_st2uc9zo", + "id": "call_frxtygkf", "function": { "arguments": "{\"location\":\"San Francisco\"}", "name": "get_weather" @@ -67,15 +67,15 @@ } } ], - "created": 1759351458, + "created": 1759376619, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 18, + "completion_tokens": 16, "prompt_tokens": 161, - "total_tokens": 179, + "total_tokens": 177, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/a0c4df33879f.json b/tests/integration/recordings/responses/a0c4df33879f.json index c61531b41..be15ad0f4 100644 --- a/tests/integration/recordings/responses/a0c4df33879f.json +++ b/tests/integration/recordings/responses/a0c4df33879f.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - 
"created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,7 +73,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -99,7 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -114,7 +114,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -125,7 +125,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -140,7 +140,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -151,11 +151,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { - "content": "Sun", + "content": "sun", "function_call": null, "refusal": null, "role": "assistant", @@ -166,7 +166,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -177,7 +177,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -192,7 +192,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -203,7 +203,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -218,7 +218,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -229,7 +229,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -244,7 +244,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -255,7 +255,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -270,7 +270,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -281,7 +281,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-463", + "id": "chatcmpl-292", "choices": [ { "delta": { @@ -296,7 +296,7 @@ "logprobs": null } ], - "created": 1759351466, + "created": 1759376633, "model": "llama3.2:3b-instruct-fp16", "object": 
"chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/a46b77ffd494.json b/tests/integration/recordings/responses/a46b77ffd494.json index dff3d3fd7..469fe098d 100644 --- a/tests/integration/recordings/responses/a46b77ffd494.json +++ b/tests/integration/recordings/responses/a46b77ffd494.json @@ -17,7 +17,7 @@ "body": { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-183", + "id": "cmpl-253", "choices": [ { "finish_reason": "stop", @@ -26,7 +26,7 @@ "text": "Michael Jordan was born in the year of " } ], - "created": 1758978053, + "created": 1759376606, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", diff --git a/tests/integration/recordings/responses/a4c8d19bb1eb.json b/tests/integration/recordings/responses/a4c8d19bb1eb.json index 8cf230578..049d510e5 100644 --- a/tests/integration/recordings/responses/a4c8d19bb1eb.json +++ b/tests/integration/recordings/responses/a4c8d19bb1eb.json @@ -20,7 +20,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-498", + "id": "chatcmpl-395", "choices": [ { "finish_reason": "stop", @@ -37,7 +37,7 @@ } } ], - "created": 1759351474, + "created": 1759376641, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/a689181d64d3.json b/tests/integration/recordings/responses/a689181d64d3.json new file mode 100644 index 000000000..61c34a3e4 --- /dev/null +++ b/tests/integration/recordings/responses/a689181d64d3.json @@ -0,0 +1,86 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "What's the weather in Tokyo?" 
+ } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather information", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "City name" + } + }, + "required": [ + "location" + ] + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-54", + "choices": [ + { + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null, + "message": { + "content": "", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": [ + { + "id": "call_v05v3tmn", + "function": { + "arguments": "{\"location\":\"Tokyo\"}", + "name": "get_weather" + }, + "type": "function", + "index": 0 + } + ] + } + } + ], + "created": 1759376607, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 18, + "prompt_tokens": 158, + "total_tokens": 176, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/b28f75bd87dc.json b/tests/integration/recordings/responses/b28f75bd87dc.json index 4a874e119..cfef0228f 100644 --- a/tests/integration/recordings/responses/b28f75bd87dc.json +++ b/tests/integration/recordings/responses/b28f75bd87dc.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-316", + "id": "chatcmpl-659", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759247858, + "created": 1759373708, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/b374fc18c641.json b/tests/integration/recordings/responses/b374fc18c641.json new file mode 100644 index 000000000..55cf0d7f3 --- /dev/null +++ b/tests/integration/recordings/responses/b374fc18c641.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use the tool `get_boiling_point` to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.268889Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.310661Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.35195Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.393537Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.435595Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + 
"context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.481337Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.526974Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.569942Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.612747Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.656585Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.697454Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.738529Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.781405Z", + "done": true, + "done_reason": "stop", + "total_duration": 663905208, + "load_duration": 85733250, + "prompt_eval_count": 410, + "prompt_eval_duration": 64272708, + "eval_count": 13, + "eval_duration": 513001750, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/c2ac76cbf66d.json 
b/tests/integration/recordings/responses/c2ac76cbf66d.json index 34f0c4a1d..d9b0d7f1d 100644 --- a/tests/integration/recordings/responses/c2ac76cbf66d.json +++ b/tests/integration/recordings/responses/c2ac76cbf66d.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-963", + "id": "chatcmpl-368", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245073, + "created": 1759373692, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/c3dbccc5de74.json b/tests/integration/recordings/responses/c3dbccc5de74.json index a2043db9a..699297a59 100644 --- a/tests/integration/recordings/responses/c3dbccc5de74.json +++ b/tests/integration/recordings/responses/c3dbccc5de74.json @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-634", + "id": "chatcmpl-688", "choices": [ { "delta": { @@ -58,7 +58,7 @@ "tool_calls": [ { "index": 0, - "id": "call_wubm4yax", + "id": "call_bnha2w8y", "function": { "arguments": "{\"location\":\"San Francisco, CA\"}", "name": "get_weather" @@ -72,7 +72,7 @@ "logprobs": null } ], - "created": 1758975115, + "created": 1759376611, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -83,7 +83,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-634", + "id": "chatcmpl-688", "choices": [ { "delta": { @@ -98,7 +98,7 @@ "logprobs": null } ], - "created": 1758975115, + "created": 1759376611, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/c4991de37dfb.json b/tests/integration/recordings/responses/c4991de37dfb.json index f77a51997..dbbb22eba 100644 --- a/tests/integration/recordings/responses/c4991de37dfb.json +++ b/tests/integration/recordings/responses/c4991de37dfb.json @@ -32,7 +32,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-695", + "id": "chatcmpl-61", "choices": [ { "finish_reason": "tool_calls", @@ -47,7 +47,7 @@ "function_call": null, "tool_calls": [ { - "id": "call_bfwjy2i9", + "id": "call_d0ln40bb", "function": { "arguments": "{}", "name": "no_args_tool" @@ -59,15 +59,15 @@ } } ], - "created": 1759351462, + "created": 1759376624, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 14, + "completion_tokens": 12, "prompt_tokens": 148, - "total_tokens": 162, + "total_tokens": 160, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/c8234a1171f3.json b/tests/integration/recordings/responses/c8234a1171f3.json index 6bfe929b4..10318c9eb 100644 --- a/tests/integration/recordings/responses/c8234a1171f3.json +++ b/tests/integration/recordings/responses/c8234a1171f3.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-240", + "id": "chatcmpl-753", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245081, + "created": 1759373699, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/c8e196049fe4.json 
b/tests/integration/recordings/responses/c8e196049fe4.json index 3a1495f07..62d6674e6 100644 --- a/tests/integration/recordings/responses/c8e196049fe4.json +++ b/tests/integration/recordings/responses/c8e196049fe4.json @@ -20,7 +20,7 @@ "body": { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-381", + "id": "cmpl-130", "choices": [ { "finish_reason": "stop", @@ -29,7 +29,7 @@ "text": "Michael Jordan was born in the year of " } ], - "created": 1758978056, + "created": 1759376606, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", diff --git a/tests/integration/recordings/responses/cd0ece88d392.json b/tests/integration/recordings/responses/cd0ece88d392.json new file mode 100644 index 000000000..3e0f5cd14 --- /dev/null +++ b/tests/integration/recordings/responses/cd0ece88d392.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.86924Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": 
"ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.911521Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.95324Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.996666Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.038076Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.079306Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.121626Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.162658Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.203804Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.245419Z", + "done": false, + "done_reason": 
null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.286364Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.327683Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.369528Z", + "done": true, + "done_reason": "stop", + "total_duration": 708500166, + "load_duration": 138748458, + "prompt_eval_count": 392, + "prompt_eval_duration": 68099125, + "eval_count": 13, + "eval_duration": 500834417, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/cd294c2e0038.json b/tests/integration/recordings/responses/cd294c2e0038.json index cad7814b3..944ccbf52 100644 --- a/tests/integration/recordings/responses/cd294c2e0038.json +++ b/tests/integration/recordings/responses/cd294c2e0038.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-325", + "id": "chatcmpl-249", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759247860, + "created": 1759373711, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/cf776b1aa432.json b/tests/integration/recordings/responses/cf776b1aa432.json index c7449427a..7059ff8a4 100644 --- a/tests/integration/recordings/responses/cf776b1aa432.json +++ b/tests/integration/recordings/responses/cf776b1aa432.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,7 +73,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", 
"service_tier": null, @@ -99,7 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -114,7 +114,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -125,7 +125,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -140,7 +140,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -151,7 +151,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -166,7 +166,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -177,7 +177,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -192,7 +192,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -203,7 +203,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-78", + "id": "chatcmpl-246", "choices": [ { "delta": { @@ -218,7 +218,7 @@ "logprobs": null } ], - "created": 1759259077, + "created": 1759373713, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/d0ac68cbde69.json b/tests/integration/recordings/responses/d0ac68cbde69.json index 1a4cceb98..1c6ef82a2 100644 --- a/tests/integration/recordings/responses/d0ac68cbde69.json +++ b/tests/integration/recordings/responses/d0ac68cbde69.json @@ -16,7 +16,7 @@ "model": "llama3.2:3b-instruct-fp16", "name": "llama3.2:3b-instruct-fp16", "digest": "195a8c01d91ec3cb1e0aad4624a51f2602c51fa7d96110f8ab5a20c84081804d", - "expires_at": "2025-10-01T16:04:02.100179-07:00", + "expires_at": "2025-10-01T20:49:01.661997-07:00", "size": 7919570944, "size_vram": 7919570944, "details": { @@ -29,6 +29,24 @@ "parameter_size": "3.2B", "quantization_level": "F16" } + }, + { + "model": "all-minilm:l6-v2", + "name": "all-minilm:l6-v2", + "digest": "1b226e2802dbb772b5fc32a58f103ca1804ef7501331012de126ab22f67475ef", + "expires_at": "2025-10-01T20:48:49.131311-07:00", + "size": 585846784, + "size_vram": 585846784, + "details": { + "parent_model": "", + "format": "gguf", + "family": "bert", + "families": [ + "bert" + ], + "parameter_size": "23M", + "quantization_level": "F16" + } } ] } diff --git a/tests/integration/recordings/responses/d7caf68e394e.json b/tests/integration/recordings/responses/d7caf68e394e.json index acabcaa04..8bf2ef23e 100644 --- a/tests/integration/recordings/responses/d7caf68e394e.json +++ b/tests/integration/recordings/responses/d7caf68e394e.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-56", + "id": "chatcmpl-953", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245088, + "created": 1759373707, 
"model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/d9e8f66e1d85.json b/tests/integration/recordings/responses/d9e8f66e1d85.json index 77dcd74ea..93776822e 100644 --- a/tests/integration/recordings/responses/d9e8f66e1d85.json +++ b/tests/integration/recordings/responses/d9e8f66e1d85.json @@ -71,7 +71,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-559", + "id": "chatcmpl-185", "choices": [ { "finish_reason": "tool_calls", @@ -86,9 +86,9 @@ "function_call": null, "tool_calls": [ { - "id": "call_p9nnpr9l", + "id": "call_re3m95b3", "function": { - "arguments": "{\"flight\":\"{'from':'SFO','to':'JFK'}\",\"passenger\":{\"age\":\"30\",\"name\":\"John Doe\"}}", + "arguments": "{\"flight\":\"{\\\"date\\\": \\\"2024-03-16\\\", \\\"from\\\": \\\"SFO\\\", \\\"to\\\": \\\"JFK\\\"}\",\"passenger\":\"{\\\"age\\\": 40, \\\"name\\\": \\\"John Doe\\\"}\"}", "name": "book_flight" }, "type": "function", @@ -98,15 +98,15 @@ } } ], - "created": 1759351460, + "created": 1759376621, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 38, + "completion_tokens": 59, "prompt_tokens": 227, - "total_tokens": 265, + "total_tokens": 286, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/df20f4b62da7.json b/tests/integration/recordings/responses/df20f4b62da7.json new file mode 100644 index 000000000..9c22642d5 --- /dev/null +++ b/tests/integration/recordings/responses/df20f4b62da7.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant Always respond with tool calls no matter what. 
<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nGet the boiling point of polyjuice with a tool call.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:58.856153Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:58.898198Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:58.939822Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:58.981421Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.023342Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.065147Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.106081Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + 
"__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.147339Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.189027Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.230097Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.271249Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.312423Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.353748Z", + "done": true, + "done_reason": "stop", + "total_duration": 699082625, + "load_duration": 131157125, + "prompt_eval_count": 400, + "prompt_eval_duration": 68858833, + "eval_count": 13, + "eval_duration": 498145250, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/e0c71820f395.json b/tests/integration/recordings/responses/e0c71820f395.json index 881b7e4fd..6a98cf66f 100644 --- a/tests/integration/recordings/responses/e0c71820f395.json +++ b/tests/integration/recordings/responses/e0c71820f395.json @@ -76,42 +76,32 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-927", + "id": "chatcmpl-900", "choices": [ { - "finish_reason": "tool_calls", + "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "", + "content": "None of the provided functions directly respond to the prompt \"to format an answer to the original user question.\" However, based on the context that you want a tool call with its proper arguments to best answer the given prompt, I will choose the \"complex\" function and provide an argument. 
Here's the response:\n\n{\"name\":\"complex\",\"parameters\":{\"data\":\"JSON for a function call with its proper arguments\"}}", "refusal": null, "role": "assistant", "annotations": null, "audio": null, "function_call": null, - "tool_calls": [ - { - "id": "call_umobdfav", - "function": { - "arguments": "{\"data\":\"[[1, 2], [3, 4]]\"}", - "name": "complex" - }, - "type": "function", - "index": 0 - } - ] + "tool_calls": null } } ], - "created": 1759351464, + "created": 1759376627, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 27, + "completion_tokens": 82, "prompt_tokens": 246, - "total_tokens": 273, + "total_tokens": 328, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/e9c8a0e4f0e0.json b/tests/integration/recordings/responses/e9c8a0e4f0e0.json index 9fa4d9a16..e7e6f57d4 100644 --- a/tests/integration/recordings/responses/e9c8a0e4f0e0.json +++ b/tests/integration/recordings/responses/e9c8a0e4f0e0.json @@ -20,7 +20,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-875", + "id": "chatcmpl-417", "choices": [ { "finish_reason": "stop", @@ -37,7 +37,7 @@ } } ], - "created": 1759351466, + "created": 1759376632, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/f23defea82ec.json b/tests/integration/recordings/responses/f23defea82ec.json index 1e964af04..5d37b2524 100644 --- a/tests/integration/recordings/responses/f23defea82ec.json +++ b/tests/integration/recordings/responses/f23defea82ec.json @@ -19,22 +19,390 @@ "data": [ { "embedding": [ - 0.253706, - 0.016367152, - -0.29664654, - 0.31654558, - -0.18624601, - 0.07602756, - -0.031531323, - 0.2986085, - -0.49672848, - -0.36617878, - 0.25328273, - -0.33349335, - 0.0060151755, - 0.14081024, - -0.13757885, - -0.14679416 + 0.04635219, + 0.002988263, + -0.054220885, + 0.057812735, + -0.0340614, + 0.013923248, + -0.005755826, + 0.054555666, + -0.09073176, + -0.066910096, + 0.046287432, + -0.060912322, + 0.0010950539, + 0.025724398, + -0.025169374, + -0.026821515, + -0.030190151, + 0.0019341545, + -0.0754819, + 0.057380512, + 0.020332545, + -0.005591279, + -0.0022273492, + 0.012063173, + -0.011033521, + -0.03300947, + 0.05462081, + 0.014426073, + 0.024025004, + 0.004224287, + 0.09837723, + 0.08385713, + -0.049175426, + 0.03877149, + 0.08748876, + -0.0223024, + 0.006552746, + -0.0070359865, + 0.017893821, + 0.015465863, + 0.05007282, + -0.019349905, + 0.064887345, + 0.03184605, + 0.0034936152, + 0.02317752, + -0.06297051, + 0.044468515, + -0.022246253, + -0.017976552, + 0.040390052, + -0.0020998395, + -0.05173264, + 0.014722753, + 0.01640469, + -0.06438627, + -0.043313596, + -0.040564552, + 0.044412937, + -0.0031199565, + -0.007237415, + -0.05158015, + 0.059660934, + -0.014839656, + 0.012902056, + 0.028181136, + -0.019578207, + -0.0664231, + -0.06333673, + 0.028995825, + -0.114707075, + 0.041575413, + -0.022128351, + 0.01979776, + 0.0630018, + 0.011822141, + -0.06492722, + -0.066328146, + 0.021114407, + -0.020638306, + -0.009599678, + 0.013701863, + -0.060742326, + 0.005395315, + 0.026589092, + 0.11719033, + 0.067120634, + 0.008300158, + 0.036319703, + 0.00772981, + 0.071582936, + 0.019818509, + -0.15945566, + 0.047943458, + 0.00031571978, + -0.04666597, + 0.007148715, + -0.08839544, + 0.038042437, + 0.06620088, + 
0.034336157, + -0.035366412, + 0.041598067, + 0.073756054, + -0.018818064, + -0.017260034, + 0.058635473, + -0.01371376, + 0.048319146, + -0.023727186, + 0.024134034, + 0.015763162, + 0.06681245, + 0.01748244, + 0.0825409, + -0.044568237, + 0.0015441044, + -0.011225885, + 0.0153481, + -0.061364066, + 0.05792184, + 0.044216745, + -0.047036964, + -0.02634555, + -0.033504363, + 0.06713578, + 0.030866034, + 2.024336e-34, + -0.03532978, + 0.021929236, + 0.030160688, + 0.09271786, + -0.010355268, + 0.07196569, + 0.052604284, + 0.085753724, + 0.094942175, + 0.053786535, + -0.08900509, + -0.024382822, + -0.008744401, + -0.03167582, + 0.01025236, + 0.1818434, + -0.0022662894, + 0.118558116, + -0.072208576, + -0.005867667, + 0.0746222, + -0.024001855, + -0.013938801, + -0.030681474, + -0.029207803, + -0.117624186, + -0.046466038, + -0.002622228, + -0.0902171, + -0.038626853, + -0.037497964, + -0.02418436, + -0.069297835, + 0.06424038, + 0.0045628003, + -0.0041498984, + -0.01649947, + 0.051125433, + -0.0058985935, + -0.0122523345, + -0.047424458, + -0.007806876, + 0.07906618, + 0.03244041, + -0.044682544, + -0.022625683, + 0.028852794, + -0.050480433, + 0.043801326, + -0.023512814, + -0.029832385, + 0.031089257, + 0.07129686, + -0.089649536, + 0.011963804, + -0.018448317, + 0.019637493, + 0.020081993, + 0.0012980831, + 0.093201645, + -0.064436235, + -0.040581323, + -0.01193043, + 0.043884862, + -0.010675756, + -0.030739127, + 0.005605308, + -0.110498495, + 0.044510514, + 0.037110664, + 0.04116233, + -0.039460793, + -0.04470639, + -0.027589805, + -0.02073358, + -0.067221105, + 0.050390884, + 0.031397663, + -0.008031462, + -0.009285899, + 0.0013141648, + -0.017254544, + 0.010367782, + -0.05940024, + -0.018042587, + -0.15487815, + 0.0069424273, + -0.05208202, + 0.0014201442, + -0.13956298, + -0.040203292, + 0.027910054, + -0.064872995, + -0.016270144, + 0.07052549, + 5.3188943e-34, + 0.012666737, + 0.016728623, + -0.013163009, + 0.06391275, + -0.043404065, + 0.015435096, + 0.03720438, + 0.05997576, + -0.07789181, + -0.0408386, + 0.024137221, + -0.019834999, + -0.034739267, + 0.00042199617, + 0.048484907, + 0.08716056, + -0.101133205, + -0.07535088, + -0.03912376, + -0.031597532, + -0.052266575, + 0.022085808, + -0.011040282, + 0.005077135, + -0.088432744, + -0.010477913, + 0.047780182, + -0.073345095, + 0.014382301, + 0.038075384, + 0.02176859, + -0.029071847, + -0.036925532, + 0.14317243, + 0.020646103, + -0.08367964, + 0.111576855, + -0.009943396, + 0.023071144, + 0.0926832, + 0.011242715, + 0.068017475, + -0.007714686, + 0.03060742, + -0.011360289, + 0.109015204, + 0.12930514, + -0.07566831, + 0.09001269, + -0.0090979, + 0.0148039665, + 0.048663232, + 0.08894293, + 0.038565516, + 0.005821986, + 0.016084671, + -0.106283545, + -0.033372246, + 0.05440088, + -0.005663873, + 0.0011572369, + -0.024969472, + 0.043092247, + -0.009314855, + -0.11836073, + -0.027310666, + 0.009811885, + -0.0052975323, + -0.044883158, + 0.066436425, + -0.06750139, + -0.02696421, + 0.01402391, + -0.04950559, + -0.084093384, + -0.07380851, + 0.04709705, + 4.9404687e-05, + 0.01672617, + 0.01849747, + 0.027683195, + 0.0047972985, + 0.0017495222, + 0.07066204, + -0.022430636, + 0.06875498, + 0.093927115, + 0.11101308, + -0.015589739, + 0.021178465, + 0.033638563, + 0.034676168, + -0.026882911, + -0.010514364, + 0.0073013064, + -1.2070348e-08, + -0.10034882, + -0.028641108, + -0.061462097, + -0.009792086, + -0.081652306, + -0.011814046, + 0.002039501, + 0.010384326, + 0.01639641, + 0.09542911, + 0.012538498, + -0.03542602, + 
0.018125113, + 0.062750235, + 0.0007333235, + -0.13612862, + -0.049830034, + 0.021177148, + 0.006589976, + 0.007859552, + -0.03270378, + 0.024738451, + -0.02542262, + -0.0033008803, + 0.030640591, + -0.032442387, + 0.04598555, + 0.03903257, + 0.035755396, + 0.01686084, + 0.13498692, + 0.028296864, + -0.0035224769, + -0.036735818, + -0.046355885, + 0.057701495, + 0.008000554, + 0.047822826, + 0.04911064, + 0.035214324, + -0.09817153, + 0.0050856513, + -0.018094635, + -0.04385158, + 0.06649695, + -0.037648164, + -0.006218895, + -0.037976924, + -0.0036204353, + -0.03149386, + 0.031777944, + -0.011333557, + 0.009081317, + 0.022486951, + 0.032106593, + 0.023041077, + -0.06739943, + 0.06294171, + -0.057333894, + -0.041295, + 0.060841344, + 0.03247397, + -0.05132725, + -0.04992364 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/f70f30f54211.json b/tests/integration/recordings/responses/f70f30f54211.json index 12e5ed31f..c8c0b8c08 100644 --- a/tests/integration/recordings/responses/f70f30f54211.json +++ b/tests/integration/recordings/responses/f70f30f54211.json @@ -38,7 +38,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-958", + "id": "chatcmpl-545", "choices": [ { "finish_reason": "tool_calls", @@ -53,7 +53,7 @@ "function_call": null, "tool_calls": [ { - "id": "call_wc359mqt", + "id": "call_oy2by8f2", "function": { "arguments": "{\"city\":\"Tokyo\"}", "name": "get_weather" @@ -65,15 +65,15 @@ } } ], - "created": 1759351475, + "created": 1759376642, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 14, + "completion_tokens": 15, "prompt_tokens": 177, - "total_tokens": 191, + "total_tokens": 192, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/f8ba05a5ce61.json b/tests/integration/recordings/responses/f8ba05a5ce61.json new file mode 100644 index 000000000..a09e430bd --- /dev/null +++ b/tests/integration/recordings/responses/f8ba05a5ce61.json @@ -0,0 +1,402 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point_with_metadata\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.137398Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.179615Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.221193Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.264409Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.30586Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.347477Z", + "done": false, + "done_reason": 
null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_with", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.389016Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_metadata", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.430288Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.471941Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.513993Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.555492Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.596851Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.638274Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.680806Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + 
"response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.723172Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.764626Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.806696Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.848776Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.891751Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.933562Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.975196Z", + "done": true, + "done_reason": "stop", + "total_duration": 1471473500, + "load_duration": 104730458, + "prompt_eval_count": 368, + "prompt_eval_duration": 527632084, + "eval_count": 21, + "eval_duration": 838372750, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/feae037e2abd.json b/tests/integration/recordings/responses/feae037e2abd.json new file mode 100644 index 000000000..732b71b23 --- /dev/null +++ b/tests/integration/recordings/responses/feae037e2abd.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": 
"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.185676Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.227434Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.268751Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.310105Z", + "done": false, + "done_reason": null, + "total_duration": null, + 
"load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.351683Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.396988Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.439384Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.481075Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.522627Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.564154Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.605696Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.647134Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + 
"context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.688465Z", + "done": true, + "done_reason": "stop", + "total_duration": 646686792, + "load_duration": 78333875, + "prompt_eval_count": 395, + "prompt_eval_duration": 64602125, + "eval_count": 13, + "eval_duration": 503233541, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/unit/distribution/routers/test_routing_tables.py b/tests/unit/distribution/routers/test_routing_tables.py index 456a5d041..70bb259c8 100644 --- a/tests/unit/distribution/routers/test_routing_tables.py +++ b/tests/unit/distribution/routers/test_routing_tables.py @@ -16,7 +16,7 @@ from llama_stack.apis.datasets.datasets import Dataset, DatasetPurpose, URIDataS from llama_stack.apis.datatypes import Api from llama_stack.apis.models import Model, ModelType from llama_stack.apis.shields.shields import Shield -from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroup, ToolParameter +from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroup from llama_stack.apis.vector_dbs import VectorDB from llama_stack.core.datatypes import RegistryEntrySource from llama_stack.core.routing_tables.benchmarks import BenchmarksRoutingTable @@ -137,7 +137,15 @@ class ToolGroupsImpl(Impl): ToolDef( name="test-tool", description="Test tool", - parameters=[ToolParameter(name="test-param", description="Test param", parameter_type="string")], + input_schema={ + "type": "object", + "properties": { + "test-param": { + "type": "string", + "description": "Test param" + } + } + }, ) ] ) diff --git a/tests/unit/providers/agent/test_meta_reference_agent.py b/tests/unit/providers/agent/test_meta_reference_agent.py index 07e5aa79d..90cc6d5d4 100644 --- a/tests/unit/providers/agent/test_meta_reference_agent.py +++ b/tests/unit/providers/agent/test_meta_reference_agent.py @@ -18,7 +18,7 @@ from llama_stack.apis.common.responses import PaginatedResponse from llama_stack.apis.inference import Inference from llama_stack.apis.resource import ResourceType from llama_stack.apis.safety import Safety -from llama_stack.apis.tools import ListToolsResponse, Tool, ToolGroups, ToolParameter, ToolRuntime +from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroups, ToolRuntime from llama_stack.apis.vector_io import VectorIO from llama_stack.providers.inline.agents.meta_reference.agent_instance import ChatAgent from llama_stack.providers.inline.agents.meta_reference.agents import MetaReferenceAgentsImpl @@ -232,32 +232,30 @@ async def test_delete_agent(agents_impl, sample_agent_config): async def test__initialize_tools(agents_impl, sample_agent_config): # Mock tool_groups_api.list_tools() - agents_impl.tool_groups_api.list_tools.return_value = ListToolsResponse( + agents_impl.tool_groups_api.list_tools.return_value = ListToolDefsResponse( data=[ - Tool( - identifier="story_maker", - provider_id="model-context-protocol", - type=ResourceType.tool, + ToolDef( + name="story_maker", toolgroup_id="mcp::my_mcp_server", description="Make a story", - parameters=[ - ToolParameter( - name="story_title", - parameter_type="string", - description="Title of the story", - required=True, - title="Story Title", - ), - ToolParameter( - name="input_words", - parameter_type="array", - description="Input words", - required=False, - items={"type": "string"}, - title="Input Words", - default=[], - ), - ], + 
input_schema={ + "type": "object", + "properties": { + "story_title": { + "type": "string", + "description": "Title of the story", + "title": "Story Title" + }, + "input_words": { + "type": "array", + "description": "Input words", + "items": {"type": "string"}, + "title": "Input Words", + "default": [] + } + }, + "required": ["story_title"] + }, ) ] ) diff --git a/tests/unit/providers/agents/meta_reference/test_openai_responses.py b/tests/unit/providers/agents/meta_reference/test_openai_responses.py index 5e5914a03..1056acec7 100644 --- a/tests/unit/providers/agents/meta_reference/test_openai_responses.py +++ b/tests/unit/providers/agents/meta_reference/test_openai_responses.py @@ -39,7 +39,7 @@ from llama_stack.apis.inference import ( OpenAIResponseFormatJSONSchema, OpenAIUserMessageParam, ) -from llama_stack.apis.tools.tools import Tool, ToolGroups, ToolInvocationResult, ToolParameter, ToolRuntime +from llama_stack.apis.tools.tools import ToolDef, ToolGroups, ToolInvocationResult, ToolRuntime from llama_stack.core.access_control.access_control import default_policy from llama_stack.core.datatypes import ResponsesStoreConfig from llama_stack.providers.inline.agents.meta_reference.responses.openai_responses import ( @@ -186,14 +186,20 @@ async def test_create_openai_response_with_string_input_with_tools(openai_respon input_text = "What is the capital of Ireland?" model = "meta-llama/Llama-3.1-8B-Instruct" - openai_responses_impl.tool_groups_api.get_tool.return_value = Tool( - identifier="web_search", - provider_id="client", + openai_responses_impl.tool_groups_api.get_tool.return_value = ToolDef( + name="web_search", toolgroup_id="web_search", description="Search the web for information", - parameters=[ - ToolParameter(name="query", parameter_type="string", description="The query to search for", required=True) - ], + input_schema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The query to search for" + } + }, + "required": ["query"] + }, ) openai_responses_impl.tool_runtime_api.invoke_tool.return_value = ToolInvocationResult( diff --git a/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py b/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py index 6fda2b508..c67b485b9 100644 --- a/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py +++ b/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py @@ -4,7 +4,7 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -from llama_stack.apis.tools import ToolDef, ToolParameter +from llama_stack.apis.tools import ToolDef from llama_stack.providers.inline.agents.meta_reference.responses.streaming import ( convert_tooldef_to_chat_tool, ) @@ -20,15 +20,17 @@ def test_convert_tooldef_to_chat_tool_preserves_items_field(): tool_def = ToolDef( name="test_tool", description="A test tool with array parameter", - parameters=[ - ToolParameter( - name="tags", - parameter_type="array", - description="List of tags", - required=True, - items={"type": "string"}, - ) - ], + input_schema={ + "type": "object", + "properties": { + "tags": { + "type": "array", + "description": "List of tags", + "items": {"type": "string"} + } + }, + "required": ["tags"] + }, ) result = convert_tooldef_to_chat_tool(tool_def)
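The unit-test hunks above all follow the same pattern: the old parameters=[ToolParameter(...)] list on ToolDef is replaced by a single JSON Schema dict passed as input_schema. A minimal sketch of that shape, using only the ToolDef fields that appear in this patch (the tool name and properties below are hypothetical, chosen for illustration, not taken from the change set):

    from llama_stack.apis.tools import ToolDef

    # Hypothetical tool definition in the new JSON Schema style exercised by
    # the updated tests: one "object" schema with per-parameter entries under
    # "properties" and the mandatory ones listed in "required".
    weather_tool = ToolDef(
        name="get_weather",
        description="Returns the current weather for a city",
        input_schema={
            "type": "object",
            "properties": {
                "city": {"type": "string", "description": "City name"},
            },
            "required": ["city"],
        },
    )

Extra keys such as "title", "items", or "default" are carried inside each property dict (see the story_maker and tags examples in the hunks above) rather than as separate ToolParameter attributes.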