diff --git a/docs/static/deprecated-llama-stack-spec.html b/docs/static/deprecated-llama-stack-spec.html
index fe63f78bc..7edfe3f5d 100644
--- a/docs/static/deprecated-llama-stack-spec.html
+++ b/docs/static/deprecated-llama-stack-spec.html
@@ -4289,6 +4289,10 @@
"ToolDef": {
"type": "object",
"properties": {
+ "toolgroup_id": {
+ "type": "string",
+ "description": "(Optional) ID of the tool group this tool belongs to"
+ },
"name": {
"type": "string",
"description": "Name of the tool"
@@ -4297,12 +4301,57 @@
"type": "string",
"description": "(Optional) Human-readable description of what the tool does"
},
- "parameters": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/ToolParameter"
+ "input_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
},
- "description": "(Optional) List of parameters this tool accepts"
+ "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)"
+ },
+ "output_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "(Optional) JSON Schema for tool outputs (MCP outputSchema)"
},
"metadata": {
"type": "object",
@@ -4338,68 +4387,6 @@
"title": "ToolDef",
"description": "Tool definition used in runtime contexts."
},
- "ToolParameter": {
- "type": "object",
- "properties": {
- "name": {
- "type": "string",
- "description": "Name of the parameter"
- },
- "parameter_type": {
- "type": "string",
- "description": "Type of the parameter (e.g., string, integer)"
- },
- "description": {
- "type": "string",
- "description": "Human-readable description of what the parameter does"
- },
- "required": {
- "type": "boolean",
- "default": true,
- "description": "Whether this parameter is required for tool invocation"
- },
- "items": {
- "type": "object",
- "description": "Type of the elements when parameter_type is array"
- },
- "title": {
- "type": "string",
- "description": "(Optional) Title of the parameter"
- },
- "default": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ],
- "description": "(Optional) Default value for the parameter if not provided"
- }
- },
- "additionalProperties": false,
- "required": [
- "name",
- "parameter_type",
- "description",
- "required"
- ],
- "title": "ToolParameter",
- "description": "Parameter definition for a tool."
- },
"TopKSamplingStrategy": {
"type": "object",
"properties": {
@@ -4915,79 +4902,6 @@
]
},
"arguments": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- },
- {
- "type": "array",
- "items": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- }
- ]
- }
- }
- ]
- },
- "arguments_json": {
"type": "string"
}
},
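
The hunks above replace `ToolDef.parameters` (a list of `ToolParameter` objects) with free-form `input_schema` / `output_schema` objects and add an optional `toolgroup_id`. A minimal sketch of a tool definition payload under the new schema follows; the concrete tool, its properties, and the metadata values are illustrative, only the field names come from the spec:

```python
# Illustrative ToolDef payload under the revised schema.
tool_def = {
    "toolgroup_id": "builtin::weather",      # optional: group this tool belongs to
    "name": "get_weather",
    "description": "Get weather info for places",
    "input_schema": {                        # full JSON Schema, as in MCP's inputSchema
        "type": "object",
        "properties": {
            "city": {"type": "string", "description": "City to look up"},
        },
        "required": ["city"],
    },
    "output_schema": {                       # optional, mirrors MCP's outputSchema
        "type": "object",
        "properties": {"temperature_c": {"type": "number"}},
    },
    "metadata": {"provider": "example"},
}
```
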
diff --git a/docs/static/deprecated-llama-stack-spec.yaml b/docs/static/deprecated-llama-stack-spec.yaml
index 9b1d3eff6..ca832d46b 100644
--- a/docs/static/deprecated-llama-stack-spec.yaml
+++ b/docs/static/deprecated-llama-stack-spec.yaml
@@ -3143,6 +3143,10 @@ components:
ToolDef:
type: object
properties:
+ toolgroup_id:
+ type: string
+ description: >-
+ (Optional) ID of the tool group this tool belongs to
name:
type: string
description: Name of the tool
@@ -3150,12 +3154,30 @@ components:
type: string
description: >-
(Optional) Human-readable description of what the tool does
- parameters:
- type: array
- items:
- $ref: '#/components/schemas/ToolParameter'
+ input_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
description: >-
- (Optional) List of parameters this tool accepts
+ (Optional) JSON Schema for tool inputs (MCP inputSchema)
+ output_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ (Optional) JSON Schema for tool outputs (MCP outputSchema)
metadata:
type: object
additionalProperties:
@@ -3174,50 +3196,6 @@ components:
title: ToolDef
description: >-
Tool definition used in runtime contexts.
- ToolParameter:
- type: object
- properties:
- name:
- type: string
- description: Name of the parameter
- parameter_type:
- type: string
- description: >-
- Type of the parameter (e.g., string, integer)
- description:
- type: string
- description: >-
- Human-readable description of what the parameter does
- required:
- type: boolean
- default: true
- description: >-
- Whether this parameter is required for tool invocation
- items:
- type: object
- description: >-
- Type of the elements when parameter_type is array
- title:
- type: string
- description: (Optional) Title of the parameter
- default:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Default value for the parameter if not provided
- additionalProperties: false
- required:
- - name
- - parameter_type
- - description
- - required
- title: ToolParameter
- description: Parameter definition for a tool.
TopKSamplingStrategy:
type: object
properties:
@@ -3630,33 +3608,6 @@ components:
title: BuiltinTool
- type: string
arguments:
- oneOf:
- - type: string
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: array
- items:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- arguments_json:
type: string
additionalProperties: false
required:
diff --git a/docs/static/experimental-llama-stack-spec.html b/docs/static/experimental-llama-stack-spec.html
index fe57f9132..a84226c05 100644
--- a/docs/static/experimental-llama-stack-spec.html
+++ b/docs/static/experimental-llama-stack-spec.html
@@ -2784,6 +2784,10 @@
"ToolDef": {
"type": "object",
"properties": {
+ "toolgroup_id": {
+ "type": "string",
+ "description": "(Optional) ID of the tool group this tool belongs to"
+ },
"name": {
"type": "string",
"description": "Name of the tool"
@@ -2792,12 +2796,57 @@
"type": "string",
"description": "(Optional) Human-readable description of what the tool does"
},
- "parameters": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/ToolParameter"
+ "input_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
},
- "description": "(Optional) List of parameters this tool accepts"
+ "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)"
+ },
+ "output_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "(Optional) JSON Schema for tool outputs (MCP outputSchema)"
},
"metadata": {
"type": "object",
@@ -2833,68 +2882,6 @@
"title": "ToolDef",
"description": "Tool definition used in runtime contexts."
},
- "ToolParameter": {
- "type": "object",
- "properties": {
- "name": {
- "type": "string",
- "description": "Name of the parameter"
- },
- "parameter_type": {
- "type": "string",
- "description": "Type of the parameter (e.g., string, integer)"
- },
- "description": {
- "type": "string",
- "description": "Human-readable description of what the parameter does"
- },
- "required": {
- "type": "boolean",
- "default": true,
- "description": "Whether this parameter is required for tool invocation"
- },
- "items": {
- "type": "object",
- "description": "Type of the elements when parameter_type is array"
- },
- "title": {
- "type": "string",
- "description": "(Optional) Title of the parameter"
- },
- "default": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ],
- "description": "(Optional) Default value for the parameter if not provided"
- }
- },
- "additionalProperties": false,
- "required": [
- "name",
- "parameter_type",
- "description",
- "required"
- ],
- "title": "ToolParameter",
- "description": "Parameter definition for a tool."
- },
"TopKSamplingStrategy": {
"type": "object",
"properties": {
@@ -3410,79 +3397,6 @@
]
},
"arguments": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- },
- {
- "type": "array",
- "items": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- }
- ]
- }
- }
- ]
- },
- "arguments_json": {
"type": "string"
}
},
diff --git a/docs/static/experimental-llama-stack-spec.yaml b/docs/static/experimental-llama-stack-spec.yaml
index 85129336f..a08c0cc87 100644
--- a/docs/static/experimental-llama-stack-spec.yaml
+++ b/docs/static/experimental-llama-stack-spec.yaml
@@ -2002,6 +2002,10 @@ components:
ToolDef:
type: object
properties:
+ toolgroup_id:
+ type: string
+ description: >-
+ (Optional) ID of the tool group this tool belongs to
name:
type: string
description: Name of the tool
@@ -2009,12 +2013,30 @@ components:
type: string
description: >-
(Optional) Human-readable description of what the tool does
- parameters:
- type: array
- items:
- $ref: '#/components/schemas/ToolParameter'
+ input_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
description: >-
- (Optional) List of parameters this tool accepts
+ (Optional) JSON Schema for tool inputs (MCP inputSchema)
+ output_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ (Optional) JSON Schema for tool outputs (MCP outputSchema)
metadata:
type: object
additionalProperties:
@@ -2033,50 +2055,6 @@ components:
title: ToolDef
description: >-
Tool definition used in runtime contexts.
- ToolParameter:
- type: object
- properties:
- name:
- type: string
- description: Name of the parameter
- parameter_type:
- type: string
- description: >-
- Type of the parameter (e.g., string, integer)
- description:
- type: string
- description: >-
- Human-readable description of what the parameter does
- required:
- type: boolean
- default: true
- description: >-
- Whether this parameter is required for tool invocation
- items:
- type: object
- description: >-
- Type of the elements when parameter_type is array
- title:
- type: string
- description: (Optional) Title of the parameter
- default:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Default value for the parameter if not provided
- additionalProperties: false
- required:
- - name
- - parameter_type
- - description
- - required
- title: ToolParameter
- description: Parameter definition for a tool.
TopKSamplingStrategy:
type: object
properties:
@@ -2489,33 +2467,6 @@ components:
title: BuiltinTool
- type: string
arguments:
- oneOf:
- - type: string
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: array
- items:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- arguments_json:
type: string
additionalProperties: false
required:
diff --git a/docs/static/llama-stack-spec.html b/docs/static/llama-stack-spec.html
index fa16e62ee..4693d39e0 100644
--- a/docs/static/llama-stack-spec.html
+++ b/docs/static/llama-stack-spec.html
@@ -2404,11 +2404,11 @@
"get": {
"responses": {
"200": {
- "description": "A ListToolsResponse.",
+ "description": "A ListToolDefsResponse.",
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/ListToolsResponse"
+ "$ref": "#/components/schemas/ListToolDefsResponse"
}
}
}
@@ -2449,11 +2449,11 @@
"get": {
"responses": {
"200": {
- "description": "A Tool.",
+ "description": "A ToolDef.",
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/Tool"
+ "$ref": "#/components/schemas/ToolDef"
}
}
}
@@ -8490,79 +8490,6 @@
]
},
"arguments": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- },
- {
- "type": "array",
- "items": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- }
- ]
- }
- }
- ]
- },
- "arguments_json": {
"type": "string"
}
},
@@ -10156,6 +10083,10 @@
"ToolDef": {
"type": "object",
"properties": {
+ "toolgroup_id": {
+ "type": "string",
+ "description": "(Optional) ID of the tool group this tool belongs to"
+ },
"name": {
"type": "string",
"description": "Name of the tool"
@@ -10164,12 +10095,57 @@
"type": "string",
"description": "(Optional) Human-readable description of what the tool does"
},
- "parameters": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/ToolParameter"
+ "input_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
},
- "description": "(Optional) List of parameters this tool accepts"
+ "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)"
+ },
+ "output_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "(Optional) JSON Schema for tool outputs (MCP outputSchema)"
},
"metadata": {
"type": "object",
@@ -10205,68 +10181,6 @@
"title": "ToolDef",
"description": "Tool definition used in runtime contexts."
},
- "ToolParameter": {
- "type": "object",
- "properties": {
- "name": {
- "type": "string",
- "description": "Name of the parameter"
- },
- "parameter_type": {
- "type": "string",
- "description": "Type of the parameter (e.g., string, integer)"
- },
- "description": {
- "type": "string",
- "description": "Human-readable description of what the parameter does"
- },
- "required": {
- "type": "boolean",
- "default": true,
- "description": "Whether this parameter is required for tool invocation"
- },
- "items": {
- "type": "object",
- "description": "Type of the elements when parameter_type is array"
- },
- "title": {
- "type": "string",
- "description": "(Optional) Title of the parameter"
- },
- "default": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ],
- "description": "(Optional) Default value for the parameter if not provided"
- }
- },
- "additionalProperties": false,
- "required": [
- "name",
- "parameter_type",
- "description",
- "required"
- ],
- "title": "ToolParameter",
- "description": "Parameter definition for a tool."
- },
"ListToolDefsResponse": {
"type": "object",
"properties": {
@@ -10761,107 +10675,6 @@
],
"title": "RegisterToolGroupRequest"
},
- "Tool": {
- "type": "object",
- "properties": {
- "identifier": {
- "type": "string"
- },
- "provider_resource_id": {
- "type": "string"
- },
- "provider_id": {
- "type": "string"
- },
- "type": {
- "type": "string",
- "enum": [
- "model",
- "shield",
- "vector_db",
- "dataset",
- "scoring_function",
- "benchmark",
- "tool",
- "tool_group",
- "prompt"
- ],
- "const": "tool",
- "default": "tool",
- "description": "Type of resource, always 'tool'"
- },
- "toolgroup_id": {
- "type": "string",
- "description": "ID of the tool group this tool belongs to"
- },
- "description": {
- "type": "string",
- "description": "Human-readable description of what the tool does"
- },
- "parameters": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/ToolParameter"
- },
- "description": "List of parameters this tool accepts"
- },
- "metadata": {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ]
- },
- "description": "(Optional) Additional metadata about the tool"
- }
- },
- "additionalProperties": false,
- "required": [
- "identifier",
- "provider_id",
- "type",
- "toolgroup_id",
- "description",
- "parameters"
- ],
- "title": "Tool",
- "description": "A tool that can be invoked by agents."
- },
- "ListToolsResponse": {
- "type": "object",
- "properties": {
- "data": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/Tool"
- },
- "description": "List of tools"
- }
- },
- "additionalProperties": false,
- "required": [
- "data"
- ],
- "title": "ListToolsResponse",
- "description": "Response containing a list of tools."
- },
"VectorDB": {
"type": "object",
"properties": {
diff --git a/docs/static/llama-stack-spec.yaml b/docs/static/llama-stack-spec.yaml
index 733e2cd21..7d275a221 100644
--- a/docs/static/llama-stack-spec.yaml
+++ b/docs/static/llama-stack-spec.yaml
@@ -1753,11 +1753,11 @@ paths:
get:
responses:
'200':
- description: A ListToolsResponse.
+ description: A ListToolDefsResponse.
content:
application/json:
schema:
- $ref: '#/components/schemas/ListToolsResponse'
+ $ref: '#/components/schemas/ListToolDefsResponse'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
@@ -1785,11 +1785,11 @@ paths:
get:
responses:
'200':
- description: A Tool.
+ description: A ToolDef.
content:
application/json:
schema:
- $ref: '#/components/schemas/Tool'
+ $ref: '#/components/schemas/ToolDef'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
@@ -6398,33 +6398,6 @@ components:
title: BuiltinTool
- type: string
arguments:
- oneOf:
- - type: string
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: array
- items:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- arguments_json:
type: string
additionalProperties: false
required:
@@ -7552,6 +7525,10 @@ components:
ToolDef:
type: object
properties:
+ toolgroup_id:
+ type: string
+ description: >-
+ (Optional) ID of the tool group this tool belongs to
name:
type: string
description: Name of the tool
@@ -7559,12 +7536,30 @@ components:
type: string
description: >-
(Optional) Human-readable description of what the tool does
- parameters:
- type: array
- items:
- $ref: '#/components/schemas/ToolParameter'
+ input_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
description: >-
- (Optional) List of parameters this tool accepts
+ (Optional) JSON Schema for tool inputs (MCP inputSchema)
+ output_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ (Optional) JSON Schema for tool outputs (MCP outputSchema)
metadata:
type: object
additionalProperties:
@@ -7583,50 +7578,6 @@ components:
title: ToolDef
description: >-
Tool definition used in runtime contexts.
- ToolParameter:
- type: object
- properties:
- name:
- type: string
- description: Name of the parameter
- parameter_type:
- type: string
- description: >-
- Type of the parameter (e.g., string, integer)
- description:
- type: string
- description: >-
- Human-readable description of what the parameter does
- required:
- type: boolean
- default: true
- description: >-
- Whether this parameter is required for tool invocation
- items:
- type: object
- description: >-
- Type of the elements when parameter_type is array
- title:
- type: string
- description: (Optional) Title of the parameter
- default:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Default value for the parameter if not provided
- additionalProperties: false
- required:
- - name
- - parameter_type
- - description
- - required
- title: ToolParameter
- description: Parameter definition for a tool.
ListToolDefsResponse:
type: object
properties:
@@ -8002,78 +7953,6 @@ components:
- toolgroup_id
- provider_id
title: RegisterToolGroupRequest
- Tool:
- type: object
- properties:
- identifier:
- type: string
- provider_resource_id:
- type: string
- provider_id:
- type: string
- type:
- type: string
- enum:
- - model
- - shield
- - vector_db
- - dataset
- - scoring_function
- - benchmark
- - tool
- - tool_group
- - prompt
- const: tool
- default: tool
- description: Type of resource, always 'tool'
- toolgroup_id:
- type: string
- description: >-
- ID of the tool group this tool belongs to
- description:
- type: string
- description: >-
- Human-readable description of what the tool does
- parameters:
- type: array
- items:
- $ref: '#/components/schemas/ToolParameter'
- description: List of parameters this tool accepts
- metadata:
- type: object
- additionalProperties:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Additional metadata about the tool
- additionalProperties: false
- required:
- - identifier
- - provider_id
- - type
- - toolgroup_id
- - description
- - parameters
- title: Tool
- description: A tool that can be invoked by agents.
- ListToolsResponse:
- type: object
- properties:
- data:
- type: array
- items:
- $ref: '#/components/schemas/Tool'
- description: List of tools
- additionalProperties: false
- required:
- - data
- title: ListToolsResponse
- description: Response containing a list of tools.
VectorDB:
type: object
properties:
diff --git a/docs/static/stainless-llama-stack-spec.html b/docs/static/stainless-llama-stack-spec.html
index 72ecb5bb5..1ae477e7e 100644
--- a/docs/static/stainless-llama-stack-spec.html
+++ b/docs/static/stainless-llama-stack-spec.html
@@ -2404,11 +2404,11 @@
"get": {
"responses": {
"200": {
- "description": "A ListToolsResponse.",
+ "description": "A ListToolDefsResponse.",
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/ListToolsResponse"
+ "$ref": "#/components/schemas/ListToolDefsResponse"
}
}
}
@@ -2449,11 +2449,11 @@
"get": {
"responses": {
"200": {
- "description": "A Tool.",
+ "description": "A ToolDef.",
"content": {
"application/json": {
"schema": {
- "$ref": "#/components/schemas/Tool"
+ "$ref": "#/components/schemas/ToolDef"
}
}
}
@@ -10499,79 +10499,6 @@
]
},
"arguments": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- },
- {
- "type": "array",
- "items": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- },
- {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "string"
- },
- {
- "type": "integer"
- },
- {
- "type": "number"
- },
- {
- "type": "boolean"
- },
- {
- "type": "null"
- }
- ]
- }
- }
- ]
- }
- }
- ]
- },
- "arguments_json": {
"type": "string"
}
},
@@ -12165,6 +12092,10 @@
"ToolDef": {
"type": "object",
"properties": {
+ "toolgroup_id": {
+ "type": "string",
+ "description": "(Optional) ID of the tool group this tool belongs to"
+ },
"name": {
"type": "string",
"description": "Name of the tool"
@@ -12173,12 +12104,57 @@
"type": "string",
"description": "(Optional) Human-readable description of what the tool does"
},
- "parameters": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/ToolParameter"
+ "input_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
},
- "description": "(Optional) List of parameters this tool accepts"
+ "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)"
+ },
+ "output_schema": {
+ "type": "object",
+ "additionalProperties": {
+ "oneOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "boolean"
+ },
+ {
+ "type": "number"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "type": "array"
+ },
+ {
+ "type": "object"
+ }
+ ]
+ },
+ "description": "(Optional) JSON Schema for tool outputs (MCP outputSchema)"
},
"metadata": {
"type": "object",
@@ -12214,68 +12190,6 @@
"title": "ToolDef",
"description": "Tool definition used in runtime contexts."
},
- "ToolParameter": {
- "type": "object",
- "properties": {
- "name": {
- "type": "string",
- "description": "Name of the parameter"
- },
- "parameter_type": {
- "type": "string",
- "description": "Type of the parameter (e.g., string, integer)"
- },
- "description": {
- "type": "string",
- "description": "Human-readable description of what the parameter does"
- },
- "required": {
- "type": "boolean",
- "default": true,
- "description": "Whether this parameter is required for tool invocation"
- },
- "items": {
- "type": "object",
- "description": "Type of the elements when parameter_type is array"
- },
- "title": {
- "type": "string",
- "description": "(Optional) Title of the parameter"
- },
- "default": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ],
- "description": "(Optional) Default value for the parameter if not provided"
- }
- },
- "additionalProperties": false,
- "required": [
- "name",
- "parameter_type",
- "description",
- "required"
- ],
- "title": "ToolParameter",
- "description": "Parameter definition for a tool."
- },
"ListToolDefsResponse": {
"type": "object",
"properties": {
@@ -12770,107 +12684,6 @@
],
"title": "RegisterToolGroupRequest"
},
- "Tool": {
- "type": "object",
- "properties": {
- "identifier": {
- "type": "string"
- },
- "provider_resource_id": {
- "type": "string"
- },
- "provider_id": {
- "type": "string"
- },
- "type": {
- "type": "string",
- "enum": [
- "model",
- "shield",
- "vector_db",
- "dataset",
- "scoring_function",
- "benchmark",
- "tool",
- "tool_group",
- "prompt"
- ],
- "const": "tool",
- "default": "tool",
- "description": "Type of resource, always 'tool'"
- },
- "toolgroup_id": {
- "type": "string",
- "description": "ID of the tool group this tool belongs to"
- },
- "description": {
- "type": "string",
- "description": "Human-readable description of what the tool does"
- },
- "parameters": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/ToolParameter"
- },
- "description": "List of parameters this tool accepts"
- },
- "metadata": {
- "type": "object",
- "additionalProperties": {
- "oneOf": [
- {
- "type": "null"
- },
- {
- "type": "boolean"
- },
- {
- "type": "number"
- },
- {
- "type": "string"
- },
- {
- "type": "array"
- },
- {
- "type": "object"
- }
- ]
- },
- "description": "(Optional) Additional metadata about the tool"
- }
- },
- "additionalProperties": false,
- "required": [
- "identifier",
- "provider_id",
- "type",
- "toolgroup_id",
- "description",
- "parameters"
- ],
- "title": "Tool",
- "description": "A tool that can be invoked by agents."
- },
- "ListToolsResponse": {
- "type": "object",
- "properties": {
- "data": {
- "type": "array",
- "items": {
- "$ref": "#/components/schemas/Tool"
- },
- "description": "List of tools"
- }
- },
- "additionalProperties": false,
- "required": [
- "data"
- ],
- "title": "ListToolsResponse",
- "description": "Response containing a list of tools."
- },
"VectorDB": {
"type": "object",
"properties": {
diff --git a/docs/static/stainless-llama-stack-spec.yaml b/docs/static/stainless-llama-stack-spec.yaml
index 151ea1029..cb2584d8a 100644
--- a/docs/static/stainless-llama-stack-spec.yaml
+++ b/docs/static/stainless-llama-stack-spec.yaml
@@ -1756,11 +1756,11 @@ paths:
get:
responses:
'200':
- description: A ListToolsResponse.
+ description: A ListToolDefsResponse.
content:
application/json:
schema:
- $ref: '#/components/schemas/ListToolsResponse'
+ $ref: '#/components/schemas/ListToolDefsResponse'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
@@ -1788,11 +1788,11 @@ paths:
get:
responses:
'200':
- description: A Tool.
+ description: A ToolDef.
content:
application/json:
schema:
- $ref: '#/components/schemas/Tool'
+ $ref: '#/components/schemas/ToolDef'
'400':
$ref: '#/components/responses/BadRequest400'
'429':
@@ -7843,33 +7843,6 @@ components:
title: BuiltinTool
- type: string
arguments:
- oneOf:
- - type: string
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: array
- items:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- - type: object
- additionalProperties:
- oneOf:
- - type: string
- - type: integer
- - type: number
- - type: boolean
- - type: 'null'
- arguments_json:
type: string
additionalProperties: false
required:
@@ -8997,6 +8970,10 @@ components:
ToolDef:
type: object
properties:
+ toolgroup_id:
+ type: string
+ description: >-
+ (Optional) ID of the tool group this tool belongs to
name:
type: string
description: Name of the tool
@@ -9004,12 +8981,30 @@ components:
type: string
description: >-
(Optional) Human-readable description of what the tool does
- parameters:
- type: array
- items:
- $ref: '#/components/schemas/ToolParameter'
+ input_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
description: >-
- (Optional) List of parameters this tool accepts
+ (Optional) JSON Schema for tool inputs (MCP inputSchema)
+ output_schema:
+ type: object
+ additionalProperties:
+ oneOf:
+ - type: 'null'
+ - type: boolean
+ - type: number
+ - type: string
+ - type: array
+ - type: object
+ description: >-
+ (Optional) JSON Schema for tool outputs (MCP outputSchema)
metadata:
type: object
additionalProperties:
@@ -9028,50 +9023,6 @@ components:
title: ToolDef
description: >-
Tool definition used in runtime contexts.
- ToolParameter:
- type: object
- properties:
- name:
- type: string
- description: Name of the parameter
- parameter_type:
- type: string
- description: >-
- Type of the parameter (e.g., string, integer)
- description:
- type: string
- description: >-
- Human-readable description of what the parameter does
- required:
- type: boolean
- default: true
- description: >-
- Whether this parameter is required for tool invocation
- items:
- type: object
- description: >-
- Type of the elements when parameter_type is array
- title:
- type: string
- description: (Optional) Title of the parameter
- default:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Default value for the parameter if not provided
- additionalProperties: false
- required:
- - name
- - parameter_type
- - description
- - required
- title: ToolParameter
- description: Parameter definition for a tool.
ListToolDefsResponse:
type: object
properties:
@@ -9447,78 +9398,6 @@ components:
- toolgroup_id
- provider_id
title: RegisterToolGroupRequest
- Tool:
- type: object
- properties:
- identifier:
- type: string
- provider_resource_id:
- type: string
- provider_id:
- type: string
- type:
- type: string
- enum:
- - model
- - shield
- - vector_db
- - dataset
- - scoring_function
- - benchmark
- - tool
- - tool_group
- - prompt
- const: tool
- default: tool
- description: Type of resource, always 'tool'
- toolgroup_id:
- type: string
- description: >-
- ID of the tool group this tool belongs to
- description:
- type: string
- description: >-
- Human-readable description of what the tool does
- parameters:
- type: array
- items:
- $ref: '#/components/schemas/ToolParameter'
- description: List of parameters this tool accepts
- metadata:
- type: object
- additionalProperties:
- oneOf:
- - type: 'null'
- - type: boolean
- - type: number
- - type: string
- - type: array
- - type: object
- description: >-
- (Optional) Additional metadata about the tool
- additionalProperties: false
- required:
- - identifier
- - provider_id
- - type
- - toolgroup_id
- - description
- - parameters
- title: Tool
- description: A tool that can be invoked by agents.
- ListToolsResponse:
- type: object
- properties:
- data:
- type: array
- items:
- $ref: '#/components/schemas/Tool'
- description: List of tools
- additionalProperties: false
- required:
- - data
- title: ListToolsResponse
- description: Response containing a list of tools.
VectorDB:
type: object
properties:
diff --git a/llama_stack/apis/inference/inference.py b/llama_stack/apis/inference/inference.py
index d71aea38e..829a94a6a 100644
--- a/llama_stack/apis/inference/inference.py
+++ b/llama_stack/apis/inference/inference.py
@@ -27,14 +27,12 @@ from llama_stack.models.llama.datatypes import (
StopReason,
ToolCall,
ToolDefinition,
- ToolParamDefinition,
ToolPromptFormat,
)
from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol
from llama_stack.schema_utils import json_schema_type, register_schema, webmethod
register_schema(ToolCall)
-register_schema(ToolParamDefinition)
register_schema(ToolDefinition)
from enum import StrEnum
diff --git a/llama_stack/apis/tools/tools.py b/llama_stack/apis/tools/tools.py
index 0ebbe8c50..b6a1a2543 100644
--- a/llama_stack/apis/tools/tools.py
+++ b/llama_stack/apis/tools/tools.py
@@ -7,7 +7,7 @@
from enum import Enum
from typing import Any, Literal, Protocol
-from pydantic import BaseModel, Field
+from pydantic import BaseModel
from typing_extensions import runtime_checkable
from llama_stack.apis.common.content_types import URL, InterleavedContent
@@ -19,59 +19,23 @@ from llama_stack.schema_utils import json_schema_type, webmethod
from .rag_tool import RAGToolRuntime
-@json_schema_type
-class ToolParameter(BaseModel):
- """Parameter definition for a tool.
-
- :param name: Name of the parameter
- :param parameter_type: Type of the parameter (e.g., string, integer)
- :param description: Human-readable description of what the parameter does
- :param required: Whether this parameter is required for tool invocation
- :param items: Type of the elements when parameter_type is array
- :param title: (Optional) Title of the parameter
- :param default: (Optional) Default value for the parameter if not provided
- """
-
- name: str
- parameter_type: str
- description: str
- required: bool = Field(default=True)
- items: dict | None = None
- title: str | None = None
- default: Any | None = None
-
-
-@json_schema_type
-class Tool(Resource):
- """A tool that can be invoked by agents.
-
- :param type: Type of resource, always 'tool'
- :param toolgroup_id: ID of the tool group this tool belongs to
- :param description: Human-readable description of what the tool does
- :param parameters: List of parameters this tool accepts
- :param metadata: (Optional) Additional metadata about the tool
- """
-
- type: Literal[ResourceType.tool] = ResourceType.tool
- toolgroup_id: str
- description: str
- parameters: list[ToolParameter]
- metadata: dict[str, Any] | None = None
-
-
@json_schema_type
class ToolDef(BaseModel):
"""Tool definition used in runtime contexts.
:param name: Name of the tool
:param description: (Optional) Human-readable description of what the tool does
- :param parameters: (Optional) List of parameters this tool accepts
+ :param input_schema: (Optional) JSON Schema for tool inputs (MCP inputSchema)
+ :param output_schema: (Optional) JSON Schema for tool outputs (MCP outputSchema)
:param metadata: (Optional) Additional metadata about the tool
+ :param toolgroup_id: (Optional) ID of the tool group this tool belongs to
"""
+ toolgroup_id: str | None = None
name: str
description: str | None = None
- parameters: list[ToolParameter] | None = None
+ input_schema: dict[str, Any] | None = None
+ output_schema: dict[str, Any] | None = None
metadata: dict[str, Any] | None = None
@@ -122,7 +86,7 @@ class ToolInvocationResult(BaseModel):
class ToolStore(Protocol):
- async def get_tool(self, tool_name: str) -> Tool: ...
+ async def get_tool(self, tool_name: str) -> ToolDef: ...
async def get_tool_group(self, toolgroup_id: str) -> ToolGroup: ...
@@ -135,15 +99,6 @@ class ListToolGroupsResponse(BaseModel):
data: list[ToolGroup]
-class ListToolsResponse(BaseModel):
- """Response containing a list of tools.
-
- :param data: List of tools
- """
-
- data: list[Tool]
-
-
class ListToolDefsResponse(BaseModel):
"""Response containing a list of tool definitions.
@@ -194,11 +149,11 @@ class ToolGroups(Protocol):
...
@webmethod(route="/tools", method="GET", level=LLAMA_STACK_API_V1)
- async def list_tools(self, toolgroup_id: str | None = None) -> ListToolsResponse:
+ async def list_tools(self, toolgroup_id: str | None = None) -> ListToolDefsResponse:
"""List tools with optional tool group.
:param toolgroup_id: The ID of the tool group to list tools for.
- :returns: A ListToolsResponse.
+ :returns: A ListToolDefsResponse.
"""
...
@@ -206,11 +161,11 @@ class ToolGroups(Protocol):
async def get_tool(
self,
tool_name: str,
- ) -> Tool:
+ ) -> ToolDef:
"""Get a tool by its name.
:param tool_name: The name of the tool to get.
- :returns: A Tool.
+ :returns: A ToolDef.
"""
...
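
With `ToolParameter`, `Tool`, and `ListToolsResponse` gone, `ToolDef` is the single tool type returned by `list_tools` and `get_tool`. A minimal sketch of constructing one under the new API; the tool mirrors the `trending_songs` example used in the prompt templates below, and the `output_schema` is illustrative:

```python
from llama_stack.apis.tools import ToolDef

# JSON Schemas replace the old list[ToolParameter]; toolgroup_id is optional
# and gets filled in when the tool is indexed into a group.
tool = ToolDef(
    name="trending_songs",
    description="Returns the trending songs on a Music site",
    input_schema={
        "type": "object",
        "properties": {
            "n": {"type": "integer", "description": "The number of songs to return"},
            "genre": {"type": "string", "description": "The genre of the songs to return"},
        },
        "required": ["n"],
    },
    output_schema={"type": "array", "items": {"type": "string"}},
)
```
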
diff --git a/llama_stack/core/datatypes.py b/llama_stack/core/datatypes.py
index 6a297f012..930cf2646 100644
--- a/llama_stack/core/datatypes.py
+++ b/llama_stack/core/datatypes.py
@@ -22,7 +22,7 @@ from llama_stack.apis.safety import Safety
from llama_stack.apis.scoring import Scoring
from llama_stack.apis.scoring_functions import ScoringFn, ScoringFnInput
from llama_stack.apis.shields import Shield, ShieldInput
-from llama_stack.apis.tools import Tool, ToolGroup, ToolGroupInput, ToolRuntime
+from llama_stack.apis.tools import ToolGroup, ToolGroupInput, ToolRuntime
from llama_stack.apis.vector_dbs import VectorDB, VectorDBInput
from llama_stack.apis.vector_io import VectorIO
from llama_stack.core.access_control.datatypes import AccessRule
@@ -84,15 +84,11 @@ class BenchmarkWithOwner(Benchmark, ResourceWithOwner):
pass
-class ToolWithOwner(Tool, ResourceWithOwner):
- pass
-
-
class ToolGroupWithOwner(ToolGroup, ResourceWithOwner):
pass
-RoutableObject = Model | Shield | VectorDB | Dataset | ScoringFn | Benchmark | Tool | ToolGroup
+RoutableObject = Model | Shield | VectorDB | Dataset | ScoringFn | Benchmark | ToolGroup
RoutableObjectWithProvider = Annotated[
ModelWithOwner
@@ -101,7 +97,6 @@ RoutableObjectWithProvider = Annotated[
| DatasetWithOwner
| ScoringFnWithOwner
| BenchmarkWithOwner
- | ToolWithOwner
| ToolGroupWithOwner,
Field(discriminator="type"),
]
diff --git a/llama_stack/core/routers/tool_runtime.py b/llama_stack/core/routers/tool_runtime.py
index fd606f33b..ad82293e5 100644
--- a/llama_stack/core/routers/tool_runtime.py
+++ b/llama_stack/core/routers/tool_runtime.py
@@ -11,7 +11,7 @@ from llama_stack.apis.common.content_types import (
InterleavedContent,
)
from llama_stack.apis.tools import (
- ListToolsResponse,
+ ListToolDefsResponse,
RAGDocument,
RAGQueryConfig,
RAGQueryResult,
@@ -86,6 +86,6 @@ class ToolRuntimeRouter(ToolRuntime):
async def list_runtime_tools(
self, tool_group_id: str | None = None, mcp_endpoint: URL | None = None
- ) -> ListToolsResponse:
+ ) -> ListToolDefsResponse:
logger.debug(f"ToolRuntimeRouter.list_runtime_tools: {tool_group_id}")
return await self.routing_table.list_tools(tool_group_id)
diff --git a/llama_stack/core/routing_tables/toolgroups.py b/llama_stack/core/routing_tables/toolgroups.py
index 8172b9b5f..2d47bbb17 100644
--- a/llama_stack/core/routing_tables/toolgroups.py
+++ b/llama_stack/core/routing_tables/toolgroups.py
@@ -8,7 +8,7 @@ from typing import Any
from llama_stack.apis.common.content_types import URL
from llama_stack.apis.common.errors import ToolGroupNotFoundError
-from llama_stack.apis.tools import ListToolGroupsResponse, ListToolsResponse, Tool, ToolGroup, ToolGroups
+from llama_stack.apis.tools import ListToolDefsResponse, ListToolGroupsResponse, ToolDef, ToolGroup, ToolGroups
from llama_stack.core.datatypes import AuthenticationRequiredError, ToolGroupWithOwner
from llama_stack.log import get_logger
@@ -27,7 +27,7 @@ def parse_toolgroup_from_toolgroup_name_pair(toolgroup_name_with_maybe_tool_name
class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups):
- toolgroups_to_tools: dict[str, list[Tool]] = {}
+ toolgroups_to_tools: dict[str, list[ToolDef]] = {}
tool_to_toolgroup: dict[str, str] = {}
# overridden
@@ -43,7 +43,7 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups):
routing_key = self.tool_to_toolgroup[routing_key]
return await super().get_provider_impl(routing_key, provider_id)
- async def list_tools(self, toolgroup_id: str | None = None) -> ListToolsResponse:
+ async def list_tools(self, toolgroup_id: str | None = None) -> ListToolDefsResponse:
if toolgroup_id:
if group_id := parse_toolgroup_from_toolgroup_name_pair(toolgroup_id):
toolgroup_id = group_id
@@ -68,30 +68,19 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups):
continue
all_tools.extend(self.toolgroups_to_tools[toolgroup.identifier])
- return ListToolsResponse(data=all_tools)
+ return ListToolDefsResponse(data=all_tools)
async def _index_tools(self, toolgroup: ToolGroup):
provider_impl = await super().get_provider_impl(toolgroup.identifier, toolgroup.provider_id)
tooldefs_response = await provider_impl.list_runtime_tools(toolgroup.identifier, toolgroup.mcp_endpoint)
- # TODO: kill this Tool vs ToolDef distinction
tooldefs = tooldefs_response.data
- tools = []
for t in tooldefs:
- tools.append(
- Tool(
- identifier=t.name,
- toolgroup_id=toolgroup.identifier,
- description=t.description or "",
- parameters=t.parameters or [],
- metadata=t.metadata,
- provider_id=toolgroup.provider_id,
- )
- )
+ t.toolgroup_id = toolgroup.identifier
- self.toolgroups_to_tools[toolgroup.identifier] = tools
- for tool in tools:
- self.tool_to_toolgroup[tool.identifier] = toolgroup.identifier
+ self.toolgroups_to_tools[toolgroup.identifier] = tooldefs
+ for tool in tooldefs:
+ self.tool_to_toolgroup[tool.name] = toolgroup.identifier
async def list_tool_groups(self) -> ListToolGroupsResponse:
return ListToolGroupsResponse(data=await self.get_all_with_type("tool_group"))
@@ -102,12 +91,12 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups):
raise ToolGroupNotFoundError(toolgroup_id)
return tool_group
- async def get_tool(self, tool_name: str) -> Tool:
+ async def get_tool(self, tool_name: str) -> ToolDef:
if tool_name in self.tool_to_toolgroup:
toolgroup_id = self.tool_to_toolgroup[tool_name]
tools = self.toolgroups_to_tools[toolgroup_id]
for tool in tools:
- if tool.identifier == tool_name:
+ if tool.name == tool_name:
return tool
raise ValueError(f"Tool '{tool_name}' not found")
@@ -132,7 +121,6 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups):
# baked in some of the code and tests right now.
if not toolgroup.mcp_endpoint:
await self._index_tools(toolgroup)
- return toolgroup
async def unregister_toolgroup(self, toolgroup_id: str) -> None:
await self.unregister_object(await self.get_tool_group(toolgroup_id))
diff --git a/llama_stack/core/server/server.py b/llama_stack/core/server/server.py
index 7d119c139..873335775 100644
--- a/llama_stack/core/server/server.py
+++ b/llama_stack/core/server/server.py
@@ -257,7 +257,7 @@ def create_dynamic_typed_route(func: Any, method: str, route: str) -> Callable:
return result
except Exception as e:
- if logger.isEnabledFor(logging.DEBUG):
+ if logger.isEnabledFor(logging.INFO):
logger.exception(f"Error executing endpoint {route=} {method=}")
else:
logger.error(f"Error executing endpoint {route=} {method=}: {str(e)}")
diff --git a/llama_stack/core/store/registry.py b/llama_stack/core/store/registry.py
index 5f4abe9aa..624dbd176 100644
--- a/llama_stack/core/store/registry.py
+++ b/llama_stack/core/store/registry.py
@@ -36,7 +36,7 @@ class DistributionRegistry(Protocol):
REGISTER_PREFIX = "distributions:registry"
-KEY_VERSION = "v9"
+KEY_VERSION = "v10"
KEY_FORMAT = f"{REGISTER_PREFIX}:{KEY_VERSION}::" + "{type}:{identifier}"
diff --git a/llama_stack/core/ui/page/playground/tools.py b/llama_stack/core/ui/page/playground/tools.py
index 602c9eea1..4ee9d2204 100644
--- a/llama_stack/core/ui/page/playground/tools.py
+++ b/llama_stack/core/ui/page/playground/tools.py
@@ -81,7 +81,7 @@ def tool_chat_page():
for toolgroup_id in toolgroup_selection:
tools = client.tools.list(toolgroup_id=toolgroup_id)
- grouped_tools[toolgroup_id] = [tool.identifier for tool in tools]
+ grouped_tools[toolgroup_id] = [tool.name for tool in tools]
total_tools += len(tools)
st.markdown(f"Active Tools: 🛠{total_tools}")
diff --git a/llama_stack/models/llama/datatypes.py b/llama_stack/models/llama/datatypes.py
index 0baa6e55b..7cb7aa7bd 100644
--- a/llama_stack/models/llama/datatypes.py
+++ b/llama_stack/models/llama/datatypes.py
@@ -37,14 +37,7 @@ RecursiveType = Primitive | list[Primitive] | dict[str, Primitive]
class ToolCall(BaseModel):
call_id: str
tool_name: BuiltinTool | str
- # Plan is to deprecate the Dict in favor of a JSON string
- # that is parsed on the client side instead of trying to manage
- # the recursive type here.
- # Making this a union so that client side can start prepping for this change.
- # Eventually, we will remove both the Dict and arguments_json field,
- # and arguments will just be a str
- arguments: str | dict[str, RecursiveType]
- arguments_json: str | None = None
+ arguments: str
@field_validator("tool_name", mode="before")
@classmethod
@@ -88,19 +81,11 @@ class StopReason(Enum):
out_of_tokens = "out_of_tokens"
-class ToolParamDefinition(BaseModel):
- param_type: str
- description: str | None = None
- required: bool | None = True
- items: Any | None = None
- title: str | None = None
- default: Any | None = None
-
-
class ToolDefinition(BaseModel):
tool_name: BuiltinTool | str
description: str | None = None
- parameters: dict[str, ToolParamDefinition] | None = None
+ input_schema: dict[str, Any] | None = None
+ output_schema: dict[str, Any] | None = None
@field_validator("tool_name", mode="before")
@classmethod
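
Downstream of this change, `ToolCall.arguments` is always a JSON-encoded string and `ToolDefinition` carries JSON Schemas directly. A minimal sketch of both, reusing the `get_weather` example from the prompt templates:

```python
import json

from llama_stack.models.llama.datatypes import ToolCall, ToolDefinition

# arguments is a JSON string end to end; callers parse it themselves
# instead of reading the removed arguments_json field.
call = ToolCall(
    call_id="call-1",
    tool_name="get_weather",
    arguments=json.dumps({"city": "Tokyo"}),
)
args = json.loads(call.arguments)  # {"city": "Tokyo"}

# ToolDefinition mirrors ToolDef: input_schema/output_schema instead of
# a dict of ToolParamDefinition objects.
defn = ToolDefinition(
    tool_name="get_weather",
    description="Get weather info for places",
    input_schema={
        "type": "object",
        "properties": {"city": {"type": "string"}},
        "required": ["city"],
    },
)
```
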
diff --git a/llama_stack/models/llama/llama3/chat_format.py b/llama_stack/models/llama/llama3/chat_format.py
index 1f88a1699..d65865cb5 100644
--- a/llama_stack/models/llama/llama3/chat_format.py
+++ b/llama_stack/models/llama/llama3/chat_format.py
@@ -232,8 +232,7 @@ class ChatFormat:
ToolCall(
call_id=call_id,
tool_name=tool_name,
- arguments=tool_arguments,
- arguments_json=json.dumps(tool_arguments),
+ arguments=json.dumps(tool_arguments),
)
)
content = ""
diff --git a/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py b/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py
index ab626e5af..11a5993e9 100644
--- a/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py
+++ b/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py
@@ -18,7 +18,6 @@ from typing import Any
from llama_stack.apis.inference import (
BuiltinTool,
ToolDefinition,
- ToolParamDefinition,
)
from .base import PromptTemplate, PromptTemplateGeneratorBase
@@ -101,11 +100,8 @@ class JsonCustomToolGenerator(PromptTemplateGeneratorBase):
{# manually setting up JSON because jinja sorts keys in unexpected ways -#}
{%- set tname = t.tool_name -%}
{%- set tdesc = t.description -%}
- {%- set tparams = t.parameters -%}
- {%- set required_params = [] -%}
- {%- for name, param in tparams.items() if param.required == true -%}
- {%- set _ = required_params.append(name) -%}
- {%- endfor -%}
+ {%- set tprops = t.input_schema.get('properties', {}) -%}
+ {%- set required_params = t.input_schema.get('required', []) -%}
{
"type": "function",
"function": {
@@ -114,11 +110,11 @@ class JsonCustomToolGenerator(PromptTemplateGeneratorBase):
"parameters": {
"type": "object",
"properties": [
- {%- for name, param in tparams.items() %}
+ {%- for name, param in tprops.items() %}
{
"{{name}}": {
"type": "object",
- "description": "{{param.description}}"
+ "description": "{{param.get('description', '')}}"
}
}{% if not loop.last %},{% endif %}
{%- endfor %}
@@ -143,17 +139,19 @@ class JsonCustomToolGenerator(PromptTemplateGeneratorBase):
ToolDefinition(
tool_name="trending_songs",
description="Returns the trending songs on a Music site",
- parameters={
- "n": ToolParamDefinition(
- param_type="int",
- description="The number of songs to return",
- required=True,
- ),
- "genre": ToolParamDefinition(
- param_type="str",
- description="The genre of the songs to return",
- required=False,
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "n": {
+ "type": "int",
+ "description": "The number of songs to return",
+ },
+ "genre": {
+ "type": "str",
+ "description": "The genre of the songs to return",
+ },
+ },
+ "required": ["n"],
},
),
]
@@ -170,11 +168,14 @@ class FunctionTagCustomToolGenerator(PromptTemplateGeneratorBase):
{#- manually setting up JSON because jinja sorts keys in unexpected ways -#}
{%- set tname = t.tool_name -%}
{%- set tdesc = t.description -%}
- {%- set modified_params = t.parameters.copy() -%}
- {%- for key, value in modified_params.items() -%}
- {%- if 'default' in value -%}
- {%- set _ = value.pop('default', None) -%}
+ {%- set tprops = t.input_schema.get('properties', {}) -%}
+ {%- set modified_params = {} -%}
+ {%- for key, value in tprops.items() -%}
+ {%- set param_copy = value.copy() -%}
+ {%- if 'default' in param_copy -%}
+ {%- set _ = param_copy.pop('default', None) -%}
{%- endif -%}
+ {%- set _ = modified_params.update({key: param_copy}) -%}
{%- endfor -%}
{%- set tparams = modified_params | tojson -%}
Use the function '{{ tname }}' to '{{ tdesc }}':
@@ -205,17 +206,19 @@ class FunctionTagCustomToolGenerator(PromptTemplateGeneratorBase):
ToolDefinition(
tool_name="trending_songs",
description="Returns the trending songs on a Music site",
- parameters={
- "n": ToolParamDefinition(
- param_type="int",
- description="The number of songs to return",
- required=True,
- ),
- "genre": ToolParamDefinition(
- param_type="str",
- description="The genre of the songs to return",
- required=False,
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "n": {
+ "type": "int",
+ "description": "The number of songs to return",
+ },
+ "genre": {
+ "type": "str",
+ "description": "The genre of the songs to return",
+ },
+ },
+ "required": ["n"],
},
),
]
@@ -255,11 +258,8 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801
{# manually setting up JSON because jinja sorts keys in unexpected ways -#}
{%- set tname = t.tool_name -%}
{%- set tdesc = t.description -%}
- {%- set tparams = t.parameters -%}
- {%- set required_params = [] -%}
- {%- for name, param in tparams.items() if param.required == true -%}
- {%- set _ = required_params.append(name) -%}
- {%- endfor -%}
+ {%- set tprops = (t.input_schema or {}).get('properties', {}) -%}
+ {%- set required_params = (t.input_schema or {}).get('required', []) -%}
{
"name": "{{tname}}",
"description": "{{tdesc}}",
@@ -267,11 +267,11 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801
"type": "dict",
"required": {{ required_params | tojson }},
"properties": {
- {%- for name, param in tparams.items() %}
+ {%- for name, param in tprops.items() %}
"{{name}}": {
- "type": "{{param.param_type}}",
- "description": "{{param.description}}"{% if param.default %},
- "default": "{{param.default}}"{% endif %}
+ "type": "{{param.get('type', 'string')}}",
+ "description": "{{param.get('description', '')}}"{% if param.get('default') %},
+ "default": "{{param.get('default')}}"{% endif %}
}{% if not loop.last %},{% endif %}
{%- endfor %}
}
@@ -299,18 +299,20 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801
ToolDefinition(
tool_name="get_weather",
description="Get weather info for places",
- parameters={
- "city": ToolParamDefinition(
- param_type="string",
- description="The name of the city to get the weather for",
- required=True,
- ),
- "metric": ToolParamDefinition(
- param_type="string",
- description="The metric for weather. Options are: celsius, fahrenheit",
- required=False,
- default="celsius",
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "city": {
+ "type": "string",
+ "description": "The name of the city to get the weather for",
+ },
+ "metric": {
+ "type": "string",
+ "description": "The metric for weather. Options are: celsius, fahrenheit",
+ "default": "celsius",
+ },
+ },
+ "required": ["city"],
},
),
]
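
Callers that still hold old `{name: ToolParamDefinition}`-style mappings need to produce the `input_schema` dict these templates now read. A hypothetical migration helper, not part of this patch, sketched under the assumption that the old fields (`param_type`, `description`, `required`, `default`) are available as plain dicts:

```python
from typing import Any


def params_to_input_schema(params: dict[str, dict[str, Any]]) -> dict[str, Any]:
    """Translate old-style parameter definitions into a JSON Schema dict."""
    properties: dict[str, Any] = {}
    required: list[str] = []
    for name, p in params.items():
        prop: dict[str, Any] = {"type": p.get("param_type", "string")}
        if p.get("description"):
            prop["description"] = p["description"]
        if p.get("default") is not None:
            prop["default"] = p["default"]
        properties[name] = prop
        if p.get("required", True):
            required.append(name)
    return {"type": "object", "properties": properties, "required": required}


schema = params_to_input_schema(
    {
        "n": {"param_type": "int", "description": "The number of songs to return", "required": True},
        "genre": {"param_type": "str", "description": "The genre of the songs to return", "required": False},
    }
)
# -> {"type": "object", "properties": {...}, "required": ["n"]}
```
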
diff --git a/llama_stack/models/llama/llama3/tool_utils.py b/llama_stack/models/llama/llama3/tool_utils.py
index d0e3e7671..8c12fe680 100644
--- a/llama_stack/models/llama/llama3/tool_utils.py
+++ b/llama_stack/models/llama/llama3/tool_utils.py
@@ -220,17 +220,18 @@ class ToolUtils:
@staticmethod
def encode_tool_call(t: ToolCall, tool_prompt_format: ToolPromptFormat) -> str:
+ args = json.loads(t.arguments)
if t.tool_name == BuiltinTool.brave_search:
- q = t.arguments["query"]
+ q = args["query"]
return f'brave_search.call(query="{q}")'
elif t.tool_name == BuiltinTool.wolfram_alpha:
- q = t.arguments["query"]
+ q = args["query"]
return f'wolfram_alpha.call(query="{q}")'
elif t.tool_name == BuiltinTool.photogen:
- q = t.arguments["query"]
+ q = args["query"]
return f'photogen.call(query="{q}")'
elif t.tool_name == BuiltinTool.code_interpreter:
- return t.arguments["code"]
+ return args["code"]
else:
fname = t.tool_name
@@ -239,12 +240,11 @@ class ToolUtils:
{
"type": "function",
"name": fname,
- "parameters": t.arguments,
+ "parameters": args,
}
)
elif tool_prompt_format == ToolPromptFormat.function_tag:
- args = json.dumps(t.arguments)
- return f"{args}"
+ return f"{t.arguments}"
elif tool_prompt_format == ToolPromptFormat.python_list:
@@ -260,7 +260,7 @@ class ToolUtils:
else:
raise ValueError(f"Unsupported type: {type(value)}")
- args_str = ", ".join(f"{k}={format_value(v)}" for k, v in t.arguments.items())
+ args_str = ", ".join(f"{k}={format_value(v)}" for k, v in args.items())
return f"[{fname}({args_str})]"
else:
raise ValueError(f"Unsupported tool prompt format: {tool_prompt_format}")
diff --git a/llama_stack/models/llama/llama3_1/prompts.py b/llama_stack/models/llama/llama3_1/prompts.py
index 579a5ee02..433c62d86 100644
--- a/llama_stack/models/llama/llama3_1/prompts.py
+++ b/llama_stack/models/llama/llama3_1/prompts.py
@@ -11,6 +11,7 @@
# top-level folder for each specific model found within the models/ directory at
# the top-level of this source tree.
+import json
import textwrap
from llama_stack.models.llama.datatypes import (
@@ -184,7 +185,7 @@ def usecases() -> list[UseCase | str]:
ToolCall(
call_id="tool_call_id",
tool_name=BuiltinTool.wolfram_alpha,
- arguments={"query": "100th decimal of pi"},
+ arguments=json.dumps({"query": "100th decimal of pi"}),
)
],
),
diff --git a/llama_stack/models/llama/llama3_3/prompts.py b/llama_stack/models/llama/llama3_3/prompts.py
index 85796608a..0470e3218 100644
--- a/llama_stack/models/llama/llama3_3/prompts.py
+++ b/llama_stack/models/llama/llama3_3/prompts.py
@@ -11,6 +11,7 @@
# top-level folder for each specific model found within the models/ directory at
# the top-level of this source tree.
+import json
import textwrap
from llama_stack.models.llama.datatypes import (
@@ -185,7 +186,7 @@ def usecases() -> list[UseCase | str]:
ToolCall(
call_id="tool_call_id",
tool_name=BuiltinTool.wolfram_alpha,
- arguments={"query": "100th decimal of pi"},
+ arguments=json.dumps({"query": "100th decimal of pi"}),
)
],
),
diff --git a/llama_stack/models/llama/llama4/chat_format.py b/llama_stack/models/llama/llama4/chat_format.py
index 96ebd0881..3864f6438 100644
--- a/llama_stack/models/llama/llama4/chat_format.py
+++ b/llama_stack/models/llama/llama4/chat_format.py
@@ -298,8 +298,7 @@ class ChatFormat:
ToolCall(
call_id=call_id,
tool_name=tool_name,
- arguments=tool_arguments,
- arguments_json=json.dumps(tool_arguments),
+ arguments=json.dumps(tool_arguments),
)
)
content = ""
diff --git a/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py b/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py
index 9c19f89ae..1ee570933 100644
--- a/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py
+++ b/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py
@@ -13,7 +13,7 @@
import textwrap
-from llama_stack.apis.inference import ToolDefinition, ToolParamDefinition
+from llama_stack.apis.inference import ToolDefinition
from llama_stack.models.llama.llama3.prompt_templates.base import (
PromptTemplate,
PromptTemplateGeneratorBase,
@@ -81,11 +81,8 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801
{# manually setting up JSON because jinja sorts keys in unexpected ways -#}
{%- set tname = t.tool_name -%}
{%- set tdesc = t.description -%}
- {%- set tparams = t.parameters -%}
- {%- set required_params = [] -%}
- {%- for name, param in tparams.items() if param.required == true -%}
- {%- set _ = required_params.append(name) -%}
- {%- endfor -%}
+ {%- set tprops = (t.input_schema or {}).get('properties', {}) -%}
+ {%- set required_params = (t.input_schema or {}).get('required', []) -%}
{
"name": "{{tname}}",
"description": "{{tdesc}}",
@@ -93,11 +90,11 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801
"type": "dict",
"required": {{ required_params | tojson }},
"properties": {
- {%- for name, param in tparams.items() %}
+ {%- for name, param in tprops.items() %}
"{{name}}": {
- "type": "{{param.param_type}}",
- "description": "{{param.description}}"{% if param.default %},
- "default": "{{param.default}}"{% endif %}
+ "type": "{{param.get('type', 'string')}}",
+ "description": "{{param.get('description', '')}}"{% if param.get('default') %},
+ "default": "{{param.get('default')}}"{% endif %}
}{% if not loop.last %},{% endif %}
{%- endfor %}
}
@@ -119,18 +116,20 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801
ToolDefinition(
tool_name="get_weather",
description="Get weather info for places",
- parameters={
- "city": ToolParamDefinition(
- param_type="string",
- description="The name of the city to get the weather for",
- required=True,
- ),
- "metric": ToolParamDefinition(
- param_type="string",
- description="The metric for weather. Options are: celsius, fahrenheit",
- required=False,
- default="celsius",
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "city": {
+ "type": "string",
+ "description": "The name of the city to get the weather for",
+ },
+ "metric": {
+ "type": "string",
+ "description": "The metric for weather. Options are: celsius, fahrenheit",
+ "default": "celsius",
+ },
+ },
+ "required": ["city"],
},
),
]
diff --git a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py
index 32c59ba2c..207f0daec 100644
--- a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py
+++ b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py
@@ -60,7 +60,6 @@ from llama_stack.apis.inference import (
StopReason,
SystemMessage,
ToolDefinition,
- ToolParamDefinition,
ToolResponse,
ToolResponseMessage,
UserMessage,
@@ -866,20 +865,12 @@ class ChatAgent(ShieldRunnerMixin):
for tool_def in self.agent_config.client_tools:
if tool_name_to_def.get(tool_def.name, None):
raise ValueError(f"Tool {tool_def.name} already exists")
+
+ # Use input_schema from ToolDef directly
tool_name_to_def[tool_def.name] = ToolDefinition(
tool_name=tool_def.name,
description=tool_def.description,
- parameters={
- param.name: ToolParamDefinition(
- param_type=param.parameter_type,
- description=param.description,
- required=param.required,
- items=param.items,
- title=param.title,
- default=param.default,
- )
- for param in tool_def.parameters
- },
+ input_schema=tool_def.input_schema,
)
for toolgroup_name_with_maybe_tool_name in agent_config_toolgroups:
toolgroup_name, input_tool_name = self._parse_toolgroup_name(toolgroup_name_with_maybe_tool_name)
@@ -889,44 +880,34 @@ class ChatAgent(ShieldRunnerMixin):
[t.identifier for t in (await self.tool_groups_api.list_tool_groups()).data]
)
raise ValueError(f"Toolgroup {toolgroup_name} not found, available toolgroups: {available_tool_groups}")
- if input_tool_name is not None and not any(tool.identifier == input_tool_name for tool in tools.data):
+ if input_tool_name is not None and not any(tool.name == input_tool_name for tool in tools.data):
raise ValueError(
- f"Tool {input_tool_name} not found in toolgroup {toolgroup_name}. Available tools: {', '.join([tool.identifier for tool in tools.data])}"
+ f"Tool {input_tool_name} not found in toolgroup {toolgroup_name}. Available tools: {', '.join([tool.name for tool in tools.data])}"
)
for tool_def in tools.data:
if toolgroup_name.startswith("builtin") and toolgroup_name != RAG_TOOL_GROUP:
- identifier: str | BuiltinTool | None = tool_def.identifier
+ identifier: str | BuiltinTool | None = tool_def.name
if identifier == "web_search":
identifier = BuiltinTool.brave_search
else:
identifier = BuiltinTool(identifier)
else:
# add if tool_name is unspecified or the tool_def identifier is the same as the tool_name
- if input_tool_name in (None, tool_def.identifier):
- identifier = tool_def.identifier
+ if input_tool_name in (None, tool_def.name):
+ identifier = tool_def.name
else:
identifier = None
if tool_name_to_def.get(identifier, None):
raise ValueError(f"Tool {identifier} already exists")
if identifier:
- tool_name_to_def[tool_def.identifier] = ToolDefinition(
+ tool_name_to_def[identifier] = ToolDefinition(
tool_name=identifier,
description=tool_def.description,
- parameters={
- param.name: ToolParamDefinition(
- param_type=param.parameter_type,
- description=param.description,
- required=param.required,
- items=param.items,
- title=param.title,
- default=param.default,
- )
- for param in tool_def.parameters
- },
+ input_schema=tool_def.input_schema,
)
- tool_name_to_args[tool_def.identifier] = toolgroup_to_args.get(toolgroup_name, {})
+ tool_name_to_args[identifier] = toolgroup_to_args.get(toolgroup_name, {})
self.tool_defs, self.tool_name_to_args = (
list(tool_name_to_def.values()),
@@ -970,12 +951,18 @@ class ChatAgent(ShieldRunnerMixin):
tool_name_str = tool_name
logger.info(f"executing tool call: {tool_name_str} with args: {tool_call.arguments}")
+
+ try:
+ args = json.loads(tool_call.arguments)
+ except json.JSONDecodeError as e:
+ raise ValueError(f"Failed to parse arguments for tool call: {tool_call.arguments}") from e
+
result = await self.tool_runtime_api.invoke_tool(
tool_name=tool_name_str,
kwargs={
"session_id": session_id,
# get the arguments generated by the model and augment with toolgroup arg overrides for the agent
- **tool_call.arguments,
+ **args,
**self.tool_name_to_args.get(tool_name_str, {}),
},
)
diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py
index 7eaf08e13..732ad708e 100644
--- a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py
+++ b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py
@@ -62,22 +62,13 @@ def convert_tooldef_to_chat_tool(tool_def):
ChatCompletionToolParam suitable for OpenAI chat completion
"""
- from llama_stack.models.llama.datatypes import ToolDefinition, ToolParamDefinition
+ from llama_stack.models.llama.datatypes import ToolDefinition
from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool
internal_tool_def = ToolDefinition(
tool_name=tool_def.name,
description=tool_def.description,
- parameters={
- param.name: ToolParamDefinition(
- param_type=param.parameter_type,
- description=param.description,
- required=param.required,
- default=param.default,
- items=param.items,
- )
- for param in tool_def.parameters
- },
+ input_schema=tool_def.input_schema,
)
return convert_tooldef_to_openai_tool(internal_tool_def)
@@ -528,23 +519,15 @@ class StreamingResponseOrchestrator:
"""Process all tools and emit appropriate streaming events."""
from openai.types.chat import ChatCompletionToolParam
- from llama_stack.apis.tools import Tool
- from llama_stack.models.llama.datatypes import ToolDefinition, ToolParamDefinition
+ from llama_stack.apis.tools import ToolDef
+ from llama_stack.models.llama.datatypes import ToolDefinition
from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool
- def make_openai_tool(tool_name: str, tool: Tool) -> ChatCompletionToolParam:
+ def make_openai_tool(tool_name: str, tool: ToolDef) -> ChatCompletionToolParam:
tool_def = ToolDefinition(
tool_name=tool_name,
description=tool.description,
- parameters={
- param.name: ToolParamDefinition(
- param_type=param.parameter_type,
- description=param.description,
- required=param.required,
- default=param.default,
- )
- for param in tool.parameters
- },
+ input_schema=tool.input_schema,
)
return convert_tooldef_to_openai_tool(tool_def)
@@ -631,16 +614,11 @@ class StreamingResponseOrchestrator:
MCPListToolsTool(
name=t.name,
description=t.description,
- input_schema={
+ input_schema=t.input_schema
+ or {
"type": "object",
- "properties": {
- p.name: {
- "type": p.parameter_type,
- "description": p.description,
- }
- for p in t.parameters
- },
- "required": [p.name for p in t.parameters if p.required],
+ "properties": {},
+ "required": [],
},
)
)
diff --git a/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift b/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift
index 88c0218b0..8bae3582b 100644
--- a/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift
+++ b/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift
@@ -68,9 +68,7 @@ public class FunctionTagCustomToolGenerator {
{
"name": "{{t.tool_name}}",
"description": "{{t.description}}",
- "parameters": {
- "type": "dict",
- "properties": { {{t.parameters}} }
+ "input_schema": { {{t.input_schema}} }
}
{{/let}}
diff --git a/llama_stack/providers/inline/tool_runtime/rag/memory.py b/llama_stack/providers/inline/tool_runtime/rag/memory.py
index bc68f198d..c8499a9b8 100644
--- a/llama_stack/providers/inline/tool_runtime/rag/memory.py
+++ b/llama_stack/providers/inline/tool_runtime/rag/memory.py
@@ -33,7 +33,6 @@ from llama_stack.apis.tools import (
ToolDef,
ToolGroup,
ToolInvocationResult,
- ToolParameter,
ToolRuntime,
)
from llama_stack.apis.vector_io import (
@@ -301,13 +300,16 @@ class MemoryToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, RAGToolRunti
ToolDef(
name="knowledge_search",
description="Search for information in a database.",
- parameters=[
- ToolParameter(
- name="query",
- description="The query to search for. Can be a natural language sentence or keywords.",
- parameter_type="string",
- ),
- ],
+ input_schema={
+ "type": "object",
+ "properties": {
+ "query": {
+ "type": "string",
+ "description": "The query to search for. Can be a natural language sentence or keywords.",
+ }
+ },
+ "required": ["query"],
+ },
),
]
)
diff --git a/llama_stack/providers/remote/inference/vllm/vllm.py b/llama_stack/providers/remote/inference/vllm/vllm.py
index 44b3dc3db..2b58b4262 100644
--- a/llama_stack/providers/remote/inference/vllm/vllm.py
+++ b/llama_stack/providers/remote/inference/vllm/vllm.py
@@ -89,8 +89,7 @@ def _convert_to_vllm_tool_calls_in_response(
ToolCall(
call_id=call.id,
tool_name=call.function.name,
- arguments=json.loads(call.function.arguments),
- arguments_json=call.function.arguments,
+ arguments=call.function.arguments,
)
for call in tool_calls
]
@@ -100,18 +99,6 @@ def _convert_to_vllm_tools_in_request(tools: list[ToolDefinition]) -> list[dict]
compat_tools = []
for tool in tools:
- properties = {}
- compat_required = []
- if tool.parameters:
- for tool_key, tool_param in tool.parameters.items():
- properties[tool_key] = {"type": tool_param.param_type}
- if tool_param.description:
- properties[tool_key]["description"] = tool_param.description
- if tool_param.default:
- properties[tool_key]["default"] = tool_param.default
- if tool_param.required:
- compat_required.append(tool_key)
-
# The tool.tool_name can be a str or a BuiltinTool enum. If
# it's the latter, convert to a string.
tool_name = tool.tool_name
@@ -123,10 +110,11 @@ def _convert_to_vllm_tools_in_request(tools: list[ToolDefinition]) -> list[dict]
"function": {
"name": tool_name,
"description": tool.description,
- "parameters": {
+ "parameters": tool.input_schema
+ or {
"type": "object",
- "properties": properties,
- "required": compat_required,
+ "properties": {},
+ "required": [],
},
},
}
@@ -161,7 +149,6 @@ def _process_vllm_chat_completion_end_of_stream(
for _index, tool_call_buf in sorted(tool_call_bufs.items()):
args_str = tool_call_buf.arguments or "{}"
try:
- args = json.loads(args_str)
chunks.append(
ChatCompletionResponseStreamChunk(
event=ChatCompletionResponseEvent(
@@ -170,8 +157,7 @@ def _process_vllm_chat_completion_end_of_stream(
tool_call=ToolCall(
call_id=tool_call_buf.call_id,
tool_name=tool_call_buf.tool_name,
- arguments=args,
- arguments_json=args_str,
+ arguments=args_str,
),
parse_status=ToolCallParseStatus.succeeded,
),
diff --git a/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py b/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py
index e40903969..9a98964b7 100644
--- a/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py
+++ b/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py
@@ -15,7 +15,6 @@ from llama_stack.apis.tools import (
ToolDef,
ToolGroup,
ToolInvocationResult,
- ToolParameter,
ToolRuntime,
)
from llama_stack.core.request_headers import NeedsRequestProviderData
@@ -57,13 +56,16 @@ class BingSearchToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsReq
ToolDef(
name="web_search",
description="Search the web using Bing Search API",
- parameters=[
- ToolParameter(
- name="query",
- description="The query to search for",
- parameter_type="string",
- )
- ],
+ input_schema={
+ "type": "object",
+ "properties": {
+ "query": {
+ "type": "string",
+ "description": "The query to search for",
+ }
+ },
+ "required": ["query"],
+ },
)
]
)
diff --git a/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py b/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py
index ba3b910d5..02e5b5c69 100644
--- a/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py
+++ b/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py
@@ -14,7 +14,6 @@ from llama_stack.apis.tools import (
ToolDef,
ToolGroup,
ToolInvocationResult,
- ToolParameter,
ToolRuntime,
)
from llama_stack.core.request_headers import NeedsRequestProviderData
@@ -56,13 +55,16 @@ class BraveSearchToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsRe
ToolDef(
name="web_search",
description="Search the web for information",
- parameters=[
- ToolParameter(
- name="query",
- description="The query to search for",
- parameter_type="string",
- )
- ],
+ input_schema={
+ "type": "object",
+ "properties": {
+ "query": {
+ "type": "string",
+ "description": "The query to search for",
+ }
+ },
+ "required": ["query"],
+ },
built_in_type=BuiltinTool.brave_search,
)
]
diff --git a/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py b/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py
index 976ec9c57..ca629fced 100644
--- a/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py
+++ b/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py
@@ -15,7 +15,6 @@ from llama_stack.apis.tools import (
ToolDef,
ToolGroup,
ToolInvocationResult,
- ToolParameter,
ToolRuntime,
)
from llama_stack.core.request_headers import NeedsRequestProviderData
@@ -56,13 +55,16 @@ class TavilySearchToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsR
ToolDef(
name="web_search",
description="Search the web for information",
- parameters=[
- ToolParameter(
- name="query",
- description="The query to search for",
- parameter_type="string",
- )
- ],
+ input_schema={
+ "type": "object",
+ "properties": {
+ "query": {
+ "type": "string",
+ "description": "The query to search for",
+ }
+ },
+ "required": ["query"],
+ },
)
]
)
diff --git a/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py b/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py
index f12a44958..410e34195 100644
--- a/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py
+++ b/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py
@@ -15,7 +15,6 @@ from llama_stack.apis.tools import (
ToolDef,
ToolGroup,
ToolInvocationResult,
- ToolParameter,
ToolRuntime,
)
from llama_stack.core.request_headers import NeedsRequestProviderData
@@ -57,13 +56,16 @@ class WolframAlphaToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsR
ToolDef(
name="wolfram_alpha",
description="Query WolframAlpha for computational knowledge",
- parameters=[
- ToolParameter(
- name="query",
- description="The query to compute",
- parameter_type="string",
- )
- ],
+ input_schema={
+ "type": "object",
+ "properties": {
+ "query": {
+ "type": "string",
+ "description": "The query to compute",
+ }
+ },
+ "required": ["query"],
+ },
)
]
)
diff --git a/llama_stack/providers/utils/inference/openai_compat.py b/llama_stack/providers/utils/inference/openai_compat.py
index da97d7c79..d863eb53a 100644
--- a/llama_stack/providers/utils/inference/openai_compat.py
+++ b/llama_stack/providers/utils/inference/openai_compat.py
@@ -125,7 +125,6 @@ from llama_stack.models.llama.datatypes import (
StopReason,
ToolCall,
ToolDefinition,
- ToolParamDefinition,
)
from llama_stack.providers.utils.inference.prompt_adapter import (
convert_image_content_to_url,
@@ -537,18 +536,13 @@ async def convert_message_to_openai_dict(message: Message, download: bool = Fals
if isinstance(tool_name, BuiltinTool):
tool_name = tool_name.value
- # arguments_json can be None, so attempt it first and fall back to arguments
- if hasattr(tc, "arguments_json") and tc.arguments_json:
- arguments = tc.arguments_json
- else:
- arguments = json.dumps(tc.arguments)
result["tool_calls"].append(
{
"id": tc.call_id,
"type": "function",
"function": {
"name": tool_name,
- "arguments": arguments,
+ "arguments": tc.arguments,
},
}
)
@@ -641,7 +635,7 @@ async def convert_message_to_openai_dict_new(
id=tool.call_id,
function=OpenAIFunction(
name=(tool.tool_name if not isinstance(tool.tool_name, BuiltinTool) else tool.tool_name.value),
- arguments=json.dumps(tool.arguments),
+ arguments=tool.arguments, # Already a JSON string, don't double-encode
),
type="function",
)
@@ -684,8 +678,7 @@ def convert_tool_call(
valid_tool_call = ToolCall(
call_id=tool_call.id,
tool_name=tool_call.function.name,
- arguments=json.loads(tool_call.function.arguments),
- arguments_json=tool_call.function.arguments,
+ arguments=tool_call.function.arguments,
)
except Exception:
return UnparseableToolCall(
@@ -745,14 +738,8 @@ def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict:
ToolDefinition:
tool_name: str | BuiltinTool
description: Optional[str]
- parameters: Optional[Dict[str, ToolParamDefinition]]
-
- ToolParamDefinition:
- param_type: str
- description: Optional[str]
- required: Optional[bool]
- default: Optional[Any]
-
+ input_schema: Optional[Dict[str, Any]] # JSON Schema
+ output_schema: Optional[Dict[str, Any]] # JSON Schema (not used by OpenAI)
OpenAI spec -
@@ -761,20 +748,11 @@ def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict:
"function": {
"name": tool_name,
"description": description,
- "parameters": {
- "type": "object",
- "properties": {
- param_name: {
- "type": param_type,
- "description": description,
- "default": default,
- },
- ...
- },
- "required": [param_name, ...],
- },
+ "parameters": {},
},
}
+
+ NOTE: OpenAI does not support output_schema, so it is dropped here.
"""
out = {
"type": "function",
@@ -783,37 +761,19 @@ def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict:
function = out["function"]
if isinstance(tool.tool_name, BuiltinTool):
- function.update(name=tool.tool_name.value) # TODO(mf): is this sufficient?
+ function["name"] = tool.tool_name.value
else:
- function.update(name=tool.tool_name)
+ function["name"] = tool.tool_name
if tool.description:
- function.update(description=tool.description)
+ function["description"] = tool.description
- if tool.parameters:
- parameters = {
- "type": "object",
- "properties": {},
- }
- properties = parameters["properties"]
- required = []
- for param_name, param in tool.parameters.items():
- properties[param_name] = to_openai_param_type(param.param_type)
- if param.description:
- properties[param_name].update(description=param.description)
- if param.default:
- properties[param_name].update(default=param.default)
- if param.items:
- properties[param_name].update(items=param.items)
- if param.title:
- properties[param_name].update(title=param.title)
- if param.required:
- required.append(param_name)
+ if tool.input_schema:
+ # Pass through the entire JSON Schema as-is
+ function["parameters"] = tool.input_schema
- if required:
- parameters.update(required=required)
-
- function.update(parameters=parameters)
+ # NOTE: OpenAI does not support output_schema, so we drop it here
+ # It's stored in LlamaStack for validation and other provider usage
return out
@@ -874,22 +834,12 @@ def _convert_openai_request_tools(tools: list[dict[str, Any]] | None = None) ->
tool_fn = tool.get("function", {})
tool_name = tool_fn.get("name", None)
tool_desc = tool_fn.get("description", None)
-
tool_params = tool_fn.get("parameters", None)
- lls_tool_params = {}
- if tool_params is not None:
- tool_param_properties = tool_params.get("properties", {})
- for tool_param_key, tool_param_value in tool_param_properties.items():
- tool_param_def = ToolParamDefinition(
- param_type=str(tool_param_value.get("type", None)),
- description=tool_param_value.get("description", None),
- )
- lls_tool_params[tool_param_key] = tool_param_def
lls_tool = ToolDefinition(
tool_name=tool_name,
description=tool_desc,
- parameters=lls_tool_params,
+ input_schema=tool_params, # Pass through entire JSON Schema
)
lls_tools.append(lls_tool)
return lls_tools
@@ -939,8 +889,7 @@ def _convert_openai_tool_calls(
ToolCall(
call_id=call.id,
tool_name=call.function.name,
- arguments=json.loads(call.function.arguments),
- arguments_json=call.function.arguments,
+ arguments=call.function.arguments,
)
for call in tool_calls
]
@@ -1222,12 +1171,10 @@ async def convert_openai_chat_completion_stream(
)
try:
- arguments = json.loads(buffer["arguments"])
tool_call = ToolCall(
call_id=buffer["call_id"],
tool_name=buffer["name"],
- arguments=arguments,
- arguments_json=buffer["arguments"],
+ arguments=buffer["arguments"],
)
yield ChatCompletionResponseStreamChunk(
event=ChatCompletionResponseEvent(
@@ -1390,7 +1337,7 @@ class OpenAIChatCompletionToLlamaStackMixin:
openai_tool_call = OpenAIChoiceDeltaToolCall(
index=0,
function=OpenAIChoiceDeltaToolCallFunction(
- arguments=tool_call.arguments_json,
+ arguments=tool_call.arguments,
),
)
delta = OpenAIChoiceDelta(tool_calls=[openai_tool_call])
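A short sketch of the simplified conversion above (illustrative only; the tool values are made up): convert_tooldef_to_openai_tool now forwards input_schema verbatim as the OpenAI "parameters" object and drops output_schema, which the OpenAI tool format has no slot for.

from llama_stack.models.llama.datatypes import ToolDefinition
from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool

tool = ToolDefinition(
    tool_name="get_weather",
    description="Get weather info for places",
    input_schema={
        "type": "object",
        "properties": {"city": {"type": "string", "description": "City name"}},
        "required": ["city"],
    },
    # Kept on the ToolDefinition for validation and other providers, but never sent to OpenAI.
    output_schema={"type": "object", "properties": {"temp_c": {"type": "number"}}},
)

openai_tool = convert_tooldef_to_openai_tool(tool)
assert openai_tool["type"] == "function"
assert openai_tool["function"]["parameters"] == tool.input_schema  # passed through as-is
assert "output_schema" not in openai_tool["function"]              # dropped, per the NOTE above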
diff --git a/llama_stack/providers/utils/inference/openai_mixin.py b/llama_stack/providers/utils/inference/openai_mixin.py
index becec5fb3..3ff7d5cc6 100644
--- a/llama_stack/providers/utils/inference/openai_mixin.py
+++ b/llama_stack/providers/utils/inference/openai_mixin.py
@@ -286,34 +286,34 @@ class OpenAIMixin(ModelRegistryHelper, NeedsRequestProviderData, ABC):
messages = [await _localize_image_url(m) for m in messages]
- resp = await self.client.chat.completions.create(
- **await prepare_openai_completion_params(
- model=await self._get_provider_model_id(model),
- messages=messages,
- frequency_penalty=frequency_penalty,
- function_call=function_call,
- functions=functions,
- logit_bias=logit_bias,
- logprobs=logprobs,
- max_completion_tokens=max_completion_tokens,
- max_tokens=max_tokens,
- n=n,
- parallel_tool_calls=parallel_tool_calls,
- presence_penalty=presence_penalty,
- response_format=response_format,
- seed=seed,
- stop=stop,
- stream=stream,
- stream_options=stream_options,
- temperature=temperature,
- tool_choice=tool_choice,
- tools=tools,
- top_logprobs=top_logprobs,
- top_p=top_p,
- user=user,
- )
+ params = await prepare_openai_completion_params(
+ model=await self._get_provider_model_id(model),
+ messages=messages,
+ frequency_penalty=frequency_penalty,
+ function_call=function_call,
+ functions=functions,
+ logit_bias=logit_bias,
+ logprobs=logprobs,
+ max_completion_tokens=max_completion_tokens,
+ max_tokens=max_tokens,
+ n=n,
+ parallel_tool_calls=parallel_tool_calls,
+ presence_penalty=presence_penalty,
+ response_format=response_format,
+ seed=seed,
+ stop=stop,
+ stream=stream,
+ stream_options=stream_options,
+ temperature=temperature,
+ tool_choice=tool_choice,
+ tools=tools,
+ top_logprobs=top_logprobs,
+ top_p=top_p,
+ user=user,
)
+ resp = await self.client.chat.completions.create(**params)
+
return await self._maybe_overwrite_id(resp, stream) # type: ignore[no-any-return]
async def openai_embeddings(
diff --git a/llama_stack/providers/utils/tools/mcp.py b/llama_stack/providers/utils/tools/mcp.py
index 155f7eff8..48f07cb19 100644
--- a/llama_stack/providers/utils/tools/mcp.py
+++ b/llama_stack/providers/utils/tools/mcp.py
@@ -20,7 +20,6 @@ from llama_stack.apis.tools import (
ListToolDefsResponse,
ToolDef,
ToolInvocationResult,
- ToolParameter,
)
from llama_stack.core.datatypes import AuthenticationRequiredError
from llama_stack.log import get_logger
@@ -113,24 +112,12 @@ async def list_mcp_tools(endpoint: str, headers: dict[str, str]) -> ListToolDefs
async with client_wrapper(endpoint, headers) as session:
tools_result = await session.list_tools()
for tool in tools_result.tools:
- parameters = []
- for param_name, param_schema in tool.inputSchema.get("properties", {}).items():
- parameters.append(
- ToolParameter(
- name=param_name,
- parameter_type=param_schema.get("type", "string"),
- description=param_schema.get("description", ""),
- required="default" not in param_schema,
- items=param_schema.get("items", None),
- title=param_schema.get("title", None),
- default=param_schema.get("default", None),
- )
- )
tools.append(
ToolDef(
name=tool.name,
description=tool.description,
- parameters=parameters,
+ input_schema=tool.inputSchema,
+ output_schema=getattr(tool, "outputSchema", None),
metadata={
"endpoint": endpoint,
},
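The net effect of the list_mcp_tools change, as a sketch (field values hypothetical): the MCP inputSchema/outputSchema dictionaries land on ToolDef unchanged instead of being rebuilt parameter by parameter.

from llama_stack.apis.tools import ToolDef

# Hypothetical inputSchema as an MCP server would report it.
mcp_input_schema = {
    "type": "object",
    "properties": {"query": {"type": "string", "description": "Search query"}},
    "required": ["query"],
}

tool_def = ToolDef(
    name="web_search",
    description="Example MCP-provided tool",
    input_schema=mcp_input_schema,  # passed through verbatim
    output_schema=None,             # only set if the MCP server defines one
    metadata={"endpoint": "http://localhost:8000/sse"},
)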
diff --git a/tests/common/mcp.py b/tests/common/mcp.py
index f65f7c952..357ea4d41 100644
--- a/tests/common/mcp.py
+++ b/tests/common/mcp.py
@@ -222,16 +222,16 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal
def run_server():
try:
- logger.info(f"Starting MCP server on port {port}")
+ logger.debug(f"Starting MCP server on port {port}")
server_instance.run()
- logger.info(f"MCP server on port {port} has stopped")
+ logger.debug(f"MCP server on port {port} has stopped")
except Exception as e:
logger.error(f"MCP server failed to start on port {port}: {e}")
raise
# Start the server in a new thread
server_thread = threading.Thread(target=run_server, daemon=True)
- logger.info(f"Starting MCP server thread on port {port}")
+ logger.debug(f"Starting MCP server thread on port {port}")
server_thread.start()
# Polling until the server is ready
@@ -239,13 +239,13 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal
start_time = time.time()
server_url = f"http://localhost:{port}/sse"
- logger.info(f"Waiting for MCP server to be ready at {server_url}")
+ logger.debug(f"Waiting for MCP server to be ready at {server_url}")
while time.time() - start_time < timeout:
try:
response = httpx.get(server_url)
if response.status_code in [200, 401]:
- logger.info(f"MCP server is ready on port {port} (status: {response.status_code})")
+ logger.debug(f"MCP server is ready on port {port} (status: {response.status_code})")
break
except httpx.RequestError as e:
logger.debug(f"Server not ready yet, retrying... ({e})")
@@ -261,14 +261,14 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal
try:
yield {"server_url": server_url}
finally:
- logger.info(f"Shutting down MCP server on port {port}")
+ logger.debug(f"Shutting down MCP server on port {port}")
server_instance.should_exit = True
time.sleep(0.5)
# Force shutdown if still running
if server_thread.is_alive():
try:
- logger.info("Force shutting down server thread")
+ logger.debug("Force shutting down server thread")
if hasattr(server_instance, "servers") and server_instance.servers:
for srv in server_instance.servers:
srv.close()
diff --git a/tests/integration/inference/test_tools_with_schemas.py b/tests/integration/inference/test_tools_with_schemas.py
new file mode 100644
index 000000000..b144a5196
--- /dev/null
+++ b/tests/integration/inference/test_tools_with_schemas.py
@@ -0,0 +1,369 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""
+Integration tests for inference/chat completion with JSON Schema-based tools.
+Tests that tools pass through correctly to various LLM providers.
+"""
+
+import json
+
+import pytest
+
+from llama_stack import LlamaStackAsLibraryClient
+from llama_stack.models.llama.datatypes import ToolDefinition
+from tests.common.mcp import make_mcp_server
+
+AUTH_TOKEN = "test-token"
+
+
+class TestChatCompletionWithTools:
+ """Test chat completion with tools that have complex schemas."""
+
+ def test_simple_tool_call(self, llama_stack_client, text_model_id):
+ """Test basic tool calling with simple input schema."""
+ tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_weather",
+ "description": "Get weather for a location",
+ "parameters": {
+ "type": "object",
+ "properties": {"location": {"type": "string", "description": "City name"}},
+ "required": ["location"],
+ },
+ },
+ }
+ ]
+
+ response = llama_stack_client.chat.completions.create(
+ model=text_model_id,
+ messages=[{"role": "user", "content": "What's the weather in San Francisco?"}],
+ tools=tools,
+ )
+
+ assert response is not None
+
+ def test_tool_with_complex_schema(self, llama_stack_client, text_model_id):
+ """Test tool calling with complex schema including $ref and $defs."""
+ tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "book_flight",
+ "description": "Book a flight",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "flight": {"$ref": "#/$defs/FlightInfo"},
+ "passenger": {"$ref": "#/$defs/Passenger"},
+ },
+ "required": ["flight", "passenger"],
+ "$defs": {
+ "FlightInfo": {
+ "type": "object",
+ "properties": {
+ "from": {"type": "string"},
+ "to": {"type": "string"},
+ "date": {"type": "string", "format": "date"},
+ },
+ },
+ "Passenger": {
+ "type": "object",
+ "properties": {"name": {"type": "string"}, "age": {"type": "integer"}},
+ },
+ },
+ },
+ },
+ }
+ ]
+
+ response = llama_stack_client.chat.completions.create(
+ model=text_model_id,
+ messages=[{"role": "user", "content": "Book a flight from SFO to JFK for John Doe"}],
+ tools=tools,
+ )
+
+ # The key test: No errors during schema processing
+ # The LLM received a valid, complete schema with $ref/$defs
+ assert response is not None
+
+
+class TestOpenAICompatibility:
+ """Test OpenAI-compatible endpoints with new schema format."""
+
+ def test_openai_chat_completion_with_tools(self, compat_client, text_model_id):
+ """Test OpenAI-compatible chat completion with tools."""
+ from openai import OpenAI
+
+ if not isinstance(compat_client, OpenAI):
+ pytest.skip("OpenAI client required")
+
+ tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_weather",
+ "description": "Get weather information",
+ "parameters": {
+ "type": "object",
+ "properties": {"location": {"type": "string", "description": "City name"}},
+ "required": ["location"],
+ },
+ },
+ }
+ ]
+
+ response = compat_client.chat.completions.create(
+ model=text_model_id, messages=[{"role": "user", "content": "What's the weather in Tokyo?"}], tools=tools
+ )
+
+ assert response is not None
+ assert response.choices is not None
+
+ def test_openai_format_preserves_complex_schemas(self, compat_client, text_model_id):
+ """Test that complex schemas work through OpenAI-compatible API."""
+ from openai import OpenAI
+
+ if not isinstance(compat_client, OpenAI):
+ pytest.skip("OpenAI client required")
+
+ tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "process_data",
+ "description": "Process structured data",
+ "parameters": {
+ "type": "object",
+ "properties": {"data": {"$ref": "#/$defs/DataObject"}},
+ "$defs": {
+ "DataObject": {
+ "type": "object",
+ "properties": {"values": {"type": "array", "items": {"type": "number"}}},
+ }
+ },
+ },
+ },
+ }
+ ]
+
+ response = compat_client.chat.completions.create(
+ model=text_model_id, messages=[{"role": "user", "content": "Process this data"}], tools=tools
+ )
+
+ assert response is not None
+
+
+class TestMCPToolsInChatCompletion:
+ """Test using MCP tools in chat completion."""
+
+ @pytest.fixture
+ def mcp_with_schemas(self):
+ """MCP server for chat completion tests."""
+ from mcp.server.fastmcp import Context
+
+ async def calculate(x: float, y: float, operation: str, ctx: Context) -> float:
+ ops = {"add": x + y, "sub": x - y, "mul": x * y, "div": x / y if y != 0 else None}
+ return ops.get(operation, 0)
+
+ with make_mcp_server(required_auth_token=AUTH_TOKEN, tools={"calculate": calculate}) as server:
+ yield server
+
+ def test_mcp_tools_in_inference(self, llama_stack_client, text_model_id, mcp_with_schemas):
+ """Test that MCP tools can be used in inference."""
+ if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+ pytest.skip("Library client required for local MCP server")
+
+ test_toolgroup_id = "mcp::calc"
+ uri = mcp_with_schemas["server_url"]
+
+ try:
+ llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
+ except Exception:
+ pass
+
+ llama_stack_client.toolgroups.register(
+ toolgroup_id=test_toolgroup_id,
+ provider_id="model-context-protocol",
+ mcp_endpoint=dict(uri=uri),
+ )
+
+ provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}}
+ auth_headers = {
+ "X-LlamaStack-Provider-Data": json.dumps(provider_data),
+ }
+
+ # Get the tools from MCP
+ tools_response = llama_stack_client.tool_runtime.list_tools(
+ tool_group_id=test_toolgroup_id,
+ extra_headers=auth_headers,
+ )
+
+ # Convert to OpenAI format for inference
+ tools = []
+ for tool in tools_response:
+ tools.append(
+ {
+ "type": "function",
+ "function": {
+ "name": tool.name,
+ "description": tool.description,
+ "parameters": tool.input_schema or {},
+ },
+ }
+ )
+
+ # Use in chat completion
+ response = llama_stack_client.chat.completions.create(
+ model=text_model_id,
+ messages=[{"role": "user", "content": "Calculate 5 + 3"}],
+ tools=tools,
+ )
+
+ # Schema should have been passed through correctly
+ assert response is not None
+
+
+class TestProviderSpecificBehavior:
+ """Test provider-specific handling of schemas."""
+
+ def test_openai_provider_drops_output_schema(self, llama_stack_client, text_model_id):
+ """Test that OpenAI provider doesn't send output_schema (API limitation)."""
+ # This is more of a documentation test
+ # OpenAI API doesn't support output schemas, so we drop them
+
+ _tool = ToolDefinition(
+ tool_name="test",
+ input_schema={"type": "object", "properties": {"x": {"type": "string"}}},
+ output_schema={"type": "object", "properties": {"y": {"type": "number"}}},
+ )
+
+ # When this tool is sent to OpenAI provider, output_schema is dropped
+ # But input_schema is preserved
+ # This test documents the expected behavior
+
+ # We can't easily test this without mocking, but the unit tests cover it
+ pass
+
+ def test_gemini_array_support(self):
+ """Test that Gemini receives array schemas correctly (issue from commit 65f7b81e)."""
+ # This was the original bug that led to adding the 'items' field
+ # Now with full JSON Schema pass-through, arrays should work
+
+ tool = ToolDefinition(
+ tool_name="tag_processor",
+ input_schema={
+ "type": "object",
+ "properties": {"tags": {"type": "array", "items": {"type": "string"}, "description": "List of tags"}},
+ },
+ )
+
+ # With new approach, the complete schema with items is preserved
+ assert tool.input_schema["properties"]["tags"]["type"] == "array"
+ assert tool.input_schema["properties"]["tags"]["items"]["type"] == "string"
+
+
+class TestStreamingWithTools:
+ """Test streaming chat completion with tools."""
+
+ def test_streaming_tool_calls(self, llama_stack_client, text_model_id):
+ """Test that tool schemas work correctly in streaming mode."""
+ tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_time",
+ "description": "Get current time",
+ "parameters": {"type": "object", "properties": {"timezone": {"type": "string"}}},
+ },
+ }
+ ]
+
+ response_stream = llama_stack_client.chat.completions.create(
+ model=text_model_id,
+ messages=[{"role": "user", "content": "What time is it in UTC?"}],
+ tools=tools,
+ stream=True,
+ )
+
+ # Should be able to iterate through stream
+ chunks = []
+ for chunk in response_stream:
+ chunks.append(chunk)
+
+ # Chunk count varies by provider; the key check is that streaming with tool schemas raised no errors
+ assert len(chunks) >= 0
+
+
+class TestEdgeCases:
+ """Test edge cases in inference with tools."""
+
+ def test_tool_without_schema(self, llama_stack_client, text_model_id):
+ """Test tool with no input_schema."""
+ tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "no_args_tool",
+ "description": "Tool with no arguments",
+ "parameters": {"type": "object", "properties": {}},
+ },
+ }
+ ]
+
+ response = llama_stack_client.chat.completions.create(
+ model=text_model_id,
+ messages=[{"role": "user", "content": "Call the no args tool"}],
+ tools=tools,
+ )
+
+ assert response is not None
+
+ def test_multiple_tools_with_different_schemas(self, llama_stack_client, text_model_id):
+ """Test multiple tools with different schema complexities."""
+ tools = [
+ {
+ "type": "function",
+ "function": {
+ "name": "simple",
+ "parameters": {"type": "object", "properties": {"x": {"type": "string"}}},
+ },
+ },
+ {
+ "type": "function",
+ "function": {
+ "name": "complex",
+ "parameters": {
+ "type": "object",
+ "properties": {"data": {"$ref": "#/$defs/Complex"}},
+ "$defs": {
+ "Complex": {
+ "type": "object",
+ "properties": {"nested": {"type": "array", "items": {"type": "number"}}},
+ }
+ },
+ },
+ },
+ },
+ {
+ "type": "function",
+ "function": {
+ "name": "with_output",
+ "parameters": {"type": "object", "properties": {"input": {"type": "string"}}},
+ },
+ },
+ ]
+
+ response = llama_stack_client.chat.completions.create(
+ model=text_model_id,
+ messages=[{"role": "user", "content": "Use one of the available tools"}],
+ tools=tools,
+ )
+
+ # All tools should have been processed without errors
+ assert response is not None
diff --git a/tests/integration/recordings/responses/00f70ca112de.json b/tests/integration/recordings/responses/00f70ca112de.json
index 1036976c3..d6fb13295 100644
--- a/tests/integration/recordings/responses/00f70ca112de.json
+++ b/tests/integration/recordings/responses/00f70ca112de.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-282",
+ "id": "chatcmpl-281",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245124,
+ "created": 1759437798,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/0396786db779.json b/tests/integration/recordings/responses/0396786db779.json
new file mode 100644
index 000000000..e2d40c100
--- /dev/null
+++ b/tests/integration/recordings/responses/0396786db779.json
@@ -0,0 +1,366 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant Always respond with tool calls no matter what. <|eot_id|><|start_header_id|>user<|end_header_id|>\n\nGet the boiling point of polyjuice with a tool call.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.228595Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.272966Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "get",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.315637Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_bo",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.356564Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "iling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.397939Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.438829Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.479679Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "liquid",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.520682Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.56207Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "='",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.603054Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.644749Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.685399Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.7267Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "',",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.77062Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " cel",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.813947Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ci",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.854591Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "us",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.896278Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=True",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.937449Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:57.979031Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 944600833,
+ "load_duration": 83227667,
+ "prompt_eval_count": 369,
+ "prompt_eval_duration": 109699916,
+ "eval_count": 19,
+ "eval_duration": 751096500,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/044dcd8fdeb1.json b/tests/integration/recordings/responses/044dcd8fdeb1.json
index 7e8b92202..b85900d6a 100644
--- a/tests/integration/recordings/responses/044dcd8fdeb1.json
+++ b/tests/integration/recordings/responses/044dcd8fdeb1.json
@@ -28,7 +28,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -43,7 +43,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -54,7 +54,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -69,7 +69,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -80,7 +80,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -95,7 +95,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -106,7 +106,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -121,7 +121,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -132,7 +132,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -147,7 +147,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -158,11 +158,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
- "content": " us",
+ "content": " me",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -173,7 +173,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -184,7 +184,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -199,7 +199,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -210,7 +210,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -225,7 +225,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -236,7 +236,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -251,7 +251,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -262,7 +262,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -277,7 +277,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -288,7 +288,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -303,7 +303,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -314,7 +314,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -329,7 +329,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -340,7 +340,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -355,7 +355,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -366,11 +366,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
- "content": " we",
+ "content": " I",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -381,7 +381,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -392,7 +392,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -407,7 +407,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -418,7 +418,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -433,7 +433,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -444,7 +444,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -459,7 +459,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -470,7 +470,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -485,7 +485,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437810,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -496,7 +496,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -511,7 +511,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437811,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -522,7 +522,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-130",
"choices": [
{
"delta": {
@@ -537,7 +537,7 @@
"logprobs": null
}
],
- "created": 1759427013,
+ "created": 1759437811,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/04cb9de29e06.json b/tests/integration/recordings/responses/04cb9de29e06.json
new file mode 100644
index 000000000..0fdc6f8b9
--- /dev/null
+++ b/tests/integration/recordings/responses/04cb9de29e06.json
@@ -0,0 +1,366 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use the tool `get_boiling_point` to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.682181Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.728326Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "get",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.775162Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_bo",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.820267Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "iling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.864362Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.906797Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.950158Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "liquid",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:08.992796Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.034691Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "='",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.07709Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.119534Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.161661Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.204749Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "',",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.247334Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " cel",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.29011Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ci",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.331776Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "us",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.374076Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=True",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.416672Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:09.458519Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 1437962792,
+ "load_duration": 129009042,
+ "prompt_eval_count": 379,
+ "prompt_eval_duration": 530416042,
+ "eval_count": 19,
+ "eval_duration": 777491375,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/05e3ebc68306.json b/tests/integration/recordings/responses/05e3ebc68306.json
index b7d0a6e8e..665ea3012 100644
--- a/tests/integration/recordings/responses/05e3ebc68306.json
+++ b/tests/integration/recordings/responses/05e3ebc68306.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-447",
+ "id": "chatcmpl-249",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282456,
+ "created": 1759441157,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/08a21ab74e0a.json b/tests/integration/recordings/responses/08a21ab74e0a.json
new file mode 100644
index 000000000..3645efabd
--- /dev/null
+++ b/tests/integration/recordings/responses/08a21ab74e0a.json
@@ -0,0 +1,542 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant."
+ },
+ {
+ "role": "user",
+ "content": "Say hi to the world. Use tools to do so."
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_qvp9u80l",
+ "type": "function",
+ "function": {
+ "name": "greet_everyone",
+ "arguments": "{\"url\":\"world\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_qvp9u80l",
+ "content": [
+ {
+ "type": "text",
+ "text": "Hello, world!"
+ }
+ ]
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "greet_everyone",
+ "parameters": {
+ "properties": {
+ "url": {
+ "title": "Url",
+ "type": "string"
+ }
+ },
+ "required": [
+ "url"
+ ],
+ "title": "greet_everyoneArguments",
+ "type": "object"
+ }
+ }
+ },
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
+ "parameters": {
+ "properties": {
+ "liquid_name": {
+ "title": "Liquid Name",
+ "type": "string"
+ },
+ "celsius": {
+ "default": true,
+ "title": "Celsius",
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ],
+ "title": "get_boiling_pointArguments",
+ "type": "object"
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "<|python_tag|>",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "{\"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "message",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "Hello",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": ",",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": " world",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "!\",",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "type",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "hello",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "_world",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "\"}",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-714",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437846,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/0989d0d62a86.json b/tests/integration/recordings/responses/0989d0d62a86.json
new file mode 100644
index 000000000..0c2a321d9
--- /dev/null
+++ b/tests/integration/recordings/responses/0989d0d62a86.json
@@ -0,0 +1,138 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant."
+ },
+ {
+ "role": "user",
+ "content": "Say hi to the world. Use tools to do so."
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "greet_everyone",
+ "parameters": {
+ "properties": {
+ "url": {
+ "title": "Url",
+ "type": "string"
+ }
+ },
+ "required": [
+ "url"
+ ],
+ "title": "greet_everyoneArguments",
+ "type": "object"
+ }
+ }
+ },
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
+ "parameters": {
+ "properties": {
+ "liquid_name": {
+ "title": "Liquid Name",
+ "type": "string"
+ },
+ "celsius": {
+ "default": true,
+ "title": "Celsius",
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ],
+ "title": "get_boiling_pointArguments",
+ "type": "object"
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-359",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_qvp9u80l",
+ "function": {
+ "arguments": "{\"url\":\"world\"}",
+ "name": "greet_everyone"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-359",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437845,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/0a29c4085705.json b/tests/integration/recordings/responses/0a29c4085705.json
new file mode 100644
index 000000000..b4e8505d4
--- /dev/null
+++ b/tests/integration/recordings/responses/0a29c4085705.json
@@ -0,0 +1,124 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-865",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_tipirynt",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429354,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-865",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429354,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/0e8f2b001dd9.json b/tests/integration/recordings/responses/0e8f2b001dd9.json
index 6bcdfdfed..1067ed88e 100644
--- a/tests/integration/recordings/responses/0e8f2b001dd9.json
+++ b/tests/integration/recordings/responses/0e8f2b001dd9.json
@@ -20,14 +20,14 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-161",
+ "id": "chatcmpl-870",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "The answer is Saturn.",
+ "content": "The planet Saturn has rings.",
"refusal": null,
"role": "assistant",
"annotations": null,
@@ -37,15 +37,15 @@
}
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 6,
+ "completion_tokens": 7,
"prompt_tokens": 39,
- "total_tokens": 45,
+ "total_tokens": 46,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/0fad19b9d308.json b/tests/integration/recordings/responses/0fad19b9d308.json
new file mode 100644
index 000000000..486fd0b8f
--- /dev/null
+++ b/tests/integration/recordings/responses/0fad19b9d308.json
@@ -0,0 +1,93 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "What time is it in UTC?"
+ }
+ ],
+ "stream": true,
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_time",
+ "description": "Get current time",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "timezone": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-567",
+ "choices": [
+ {
+ "delta": {
+ "content": "{\"name\":\"get_time\",\"parameters\\\":{\\\"timezone\\\":\\\"UTC\\\"}}",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437807,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-567",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437807,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/178538be60e2.json b/tests/integration/recordings/responses/178538be60e2.json
index 41cb76164..aaba1cbd2 100644
--- a/tests/integration/recordings/responses/178538be60e2.json
+++ b/tests/integration/recordings/responses/178538be60e2.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-261",
+ "id": "chatcmpl-239",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245125,
+ "created": 1759437799,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/1a4da7c94fde.json b/tests/integration/recordings/responses/1a4da7c94fde.json
index ca24f20d2..0f5734bd9 100644
--- a/tests/integration/recordings/responses/1a4da7c94fde.json
+++ b/tests/integration/recordings/responses/1a4da7c94fde.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-478",
+ "id": "chatcmpl-466",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282396,
+ "created": 1759373692,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/1acd433c05d4.json b/tests/integration/recordings/responses/1acd433c05d4.json
new file mode 100644
index 000000000..5ab638216
--- /dev/null
+++ b/tests/integration/recordings/responses/1acd433c05d4.json
@@ -0,0 +1,1787 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"book_flight\",\n \"description\": \"\n Book a flight with passenger and payment information.\n\n This tool uses JSON Schema $ref and $defs for type reuse.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"flight\", \"passengers\", \"payment\"],\n \"properties\": {\n \"flight\": {\n \"type\": \"object\",\n \"description\": \"\"\n },\n \"passengers\": {\n \"type\": \"array\",\n \"description\": \"\"\n },\n \"payment\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"process_order\",\n \"description\": \"\n Process an order with nested address information.\n\n Uses nested objects and $ref.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"order_data\"],\n \"properties\": {\n \"order_data\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"flexible_contact\",\n \"description\": \"\n Accept flexible contact (email or phone).\n\n Uses anyOf schema.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"contact_info\"],\n \"properties\": {\n \"contact_info\": {\n \"type\": \"string\",\n \"description\": \"\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant that can process orders and book flights.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nProcess an order with 2 widgets going to 123 Main St, San Francisco<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[process_order(order_data={order_id=1, customer_name=\"John Doe\", address={street=\"123 Main St\", city=\"San Francisco\"}})]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n{\n \"order_id\": \"ORD789\",\n \"status\": \"processing\",\n \"data\": {\n \"order_id\": 1,\n \"customer_name\": \"John Doe\",\n \"address\": {\n \"street\": \"123 Main St\",\n \"city\": \"San Francisco\"\n }\n }\n}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[book_flight(flight={flight_number=\"AA101\", departure=\"New York\", arrival=\"Los Angeles\", passengers=[{name=\"John Doe\", email=\"johndoe@example.com\"}], payment={method=\"credit_card\", card_number=\"1234567890123456\"}})]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\nError executing tool book_flight: 2 validation errors for book_flightArguments\npassengers\n Field required [type=missing, input_value={'session_id': '7ee11e0c-...': '1234567890123456'}}}, input_type=dict]\n For further information visit https://errors.pydantic.dev/2.11/v/missing\npayment\n Field required [type=missing, input_value={'session_id': 
'7ee11e0c-...': '1234567890123456'}}}, input_type=dict]\n For further information visit https://errors.pydantic.dev/2.11/v/missing<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:57.713027Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:57.75795Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "process",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:57.802534Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:57.847491Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:57.893508Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_data",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:57.939651Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "={",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:57.984535Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.028599Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_id",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.073398Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.117854Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "1",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.161781Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.206772Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " customer",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.25349Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.298963Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.344779Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "John",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.389936Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Doe",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.437317Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.48249Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " address",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.529399Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "={",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.576296Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "street",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.620844Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.66531Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "123",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.709756Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Main",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.754076Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " St",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.797921Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.842653Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " city",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.887035Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.930907Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "San",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:58.975Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Francisco",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.019589Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\"}}",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.064177Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.109025Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "{\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.153911Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.197854Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.244999Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.291864Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_id",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.337792Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.382092Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.426921Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ORD",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.471944Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "789",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.516816Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.560907Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.604707Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.649026Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "status",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.693453Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.738699Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.783077Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "processing",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.82803Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.873239Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.918932Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:58:59.964192Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "data",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.009316Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.055147Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " {\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.100799Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.146772Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.193478Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.240171Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_id",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.287971Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.333459Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.37832Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "1",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.423158Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ",\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.468091Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.51265Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.557925Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "customer",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.60244Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.647203Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.692055Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.737131Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "John",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.781687Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Doe",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.828788Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.874402Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.922888Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:00.976299Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "address",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.024037Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.071372Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " {\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.11661Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.161193Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.205589Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "street",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.252464Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.298844Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.34424Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "123",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.388967Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Main",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.433822Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " St",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.478032Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.523181Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.567586Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.611862Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "city",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.655861Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.699861Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.74517Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "San",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.789381Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Francisco",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.833655Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\"\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.878329Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.923823Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " }\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:01.968755Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:02.012573Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " }\n",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:02.056287Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "}",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T22:59:02.100074Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 4820442250,
+ "load_duration": 79949333,
+ "prompt_eval_count": 866,
+ "prompt_eval_duration": 352139708,
+ "eval_count": 98,
+ "eval_duration": 4387637875,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/1b939935d483.json b/tests/integration/recordings/responses/1b939935d483.json
new file mode 100644
index 000000000..1eed51400
--- /dev/null
+++ b/tests/integration/recordings/responses/1b939935d483.json
@@ -0,0 +1,258 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use one of the provided functions/tools to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:01.957108Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "The",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:01.998746Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " boiling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.040281Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.081567Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " of",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.122945Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.16406Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.205051Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.246393Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " is",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.288195Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " -",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.331557Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "100",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.373397Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\u00b0C",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.414856Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ".",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:02.456059Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 669686292,
+ "load_duration": 96788459,
+ "prompt_eval_count": 408,
+ "prompt_eval_duration": 72865250,
+ "eval_count": 13,
+ "eval_duration": 499470042,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/21cf30c6181e.json b/tests/integration/recordings/responses/21cf30c6181e.json
new file mode 100644
index 000000000..e982edb47
--- /dev/null
+++ b/tests/integration/recordings/responses/21cf30c6181e.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant Always respond with tool calls no matter what. "
+ },
+ {
+ "role": "user",
+ "content": "Get the boiling point of polyjuice with a tool call."
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "str",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "bool",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-922",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_34cofb9p",
+ "function": {
+ "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425219,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-922",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425219,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/239f4768f5aa.json b/tests/integration/recordings/responses/239f4768f5aa.json
index ce540db3f..38f483090 100644
--- a/tests/integration/recordings/responses/239f4768f5aa.json
+++ b/tests/integration/recordings/responses/239f4768f5aa.json
@@ -53,14 +53,14 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-433",
+ "id": "chatcmpl-497",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "{\"first_name\": \"Michael\", \"last_name\": \"Jordan\", \"year_of_birth\": 1963}\n\n \t\t\t\t\t\t\t\t\t\t\t \t\t ",
+ "content": "{\"first_name\": \"Michael\", \"last_name\": \"Jordan\", \"year_of_birth\": 1963}",
"refusal": null,
"role": "assistant",
"annotations": null,
@@ -70,15 +70,15 @@
}
}
],
- "created": 1758979490,
+ "created": 1759376618,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 31,
+ "completion_tokens": 26,
"prompt_tokens": 60,
- "total_tokens": 91,
+ "total_tokens": 86,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/23ad3b9e003e.json b/tests/integration/recordings/responses/23ad3b9e003e.json
new file mode 100644
index 000000000..50c46c5b1
--- /dev/null
+++ b/tests/integration/recordings/responses/23ad3b9e003e.json
@@ -0,0 +1,57 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama-guard3:1b",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Task: Check if there is unsafe content in 'Tool' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: What is the boiling point of the liquid polyjuice in celsius?\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\n\n\nProvide your safety assessment for ONLY THE LAST Tool message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories."
+ }
+ ],
+ "stream": false,
+ "temperature": 0.0
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama-guard3:1b"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-651",
+ "choices": [
+ {
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "safe",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": null
+ }
+ }
+ ],
+ "created": 1759437831,
+ "model": "llama-guard3:1b",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 2,
+ "prompt_tokens": 420,
+ "total_tokens": 422,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/2717f0003e0a.json b/tests/integration/recordings/responses/2717f0003e0a.json
index 69d5d7c64..56a9333c6 100644
--- a/tests/integration/recordings/responses/2717f0003e0a.json
+++ b/tests/integration/recordings/responses/2717f0003e0a.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-588",
+ "id": "chatcmpl-531",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245128,
+ "created": 1759437800,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/278d5568fa92.json b/tests/integration/recordings/responses/278d5568fa92.json
new file mode 100644
index 000000000..85866aefa
--- /dev/null
+++ b/tests/integration/recordings/responses/278d5568fa92.json
@@ -0,0 +1,388 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_d1i5ou69",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_d1i5ou69",
+ "content": "-212"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": "212",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441676,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/2d187a11704c.json b/tests/integration/recordings/responses/2d187a11704c.json
index ecce0ec80..0c12271fd 100644
--- a/tests/integration/recordings/responses/2d187a11704c.json
+++ b/tests/integration/recordings/responses/2d187a11704c.json
@@ -22,7 +22,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:11.444139198Z",
+ "created_at": "2025-10-02T02:55:03.175181Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -40,7 +40,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:11.631417419Z",
+ "created_at": "2025-10-02T02:55:03.21666Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -58,7 +58,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:11.837785952Z",
+ "created_at": "2025-10-02T02:55:03.258841Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -76,7 +76,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:12.035361735Z",
+ "created_at": "2025-10-02T02:55:03.299188Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -94,7 +94,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:12.231459021Z",
+ "created_at": "2025-10-02T02:55:03.339415Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -112,7 +112,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:12.437587336Z",
+ "created_at": "2025-10-02T02:55:03.379794Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -130,7 +130,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:12.645814233Z",
+ "created_at": "2025-10-02T02:55:03.420354Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -148,7 +148,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:12.857399802Z",
+ "created_at": "2025-10-02T02:55:03.460933Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -166,7 +166,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:13.069748955Z",
+ "created_at": "2025-10-02T02:55:03.501777Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -184,7 +184,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:13.275446646Z",
+ "created_at": "2025-10-02T02:55:03.542402Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -202,7 +202,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:13.472121232Z",
+ "created_at": "2025-10-02T02:55:03.582816Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -220,7 +220,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:13.665744046Z",
+ "created_at": "2025-10-02T02:55:03.623108Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -238,7 +238,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:13.861581737Z",
+ "created_at": "2025-10-02T02:55:03.663532Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -256,7 +256,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:14.057543582Z",
+ "created_at": "2025-10-02T02:55:03.704651Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -274,7 +274,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:14.250235864Z",
+ "created_at": "2025-10-02T02:55:03.746321Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -292,7 +292,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:14.440950519Z",
+ "created_at": "2025-10-02T02:55:03.787213Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -310,7 +310,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:14.633159237Z",
+ "created_at": "2025-10-02T02:55:03.829153Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -328,7 +328,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:14.824645544Z",
+ "created_at": "2025-10-02T02:55:03.869545Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -346,7 +346,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:15.015421713Z",
+ "created_at": "2025-10-02T02:55:03.909839Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -364,7 +364,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:15.21010827Z",
+ "created_at": "2025-10-02T02:55:03.950296Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -382,7 +382,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:15.406911964Z",
+ "created_at": "2025-10-02T02:55:03.990725Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -400,7 +400,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:15.599086606Z",
+ "created_at": "2025-10-02T02:55:04.031037Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -418,7 +418,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:15.789596143Z",
+ "created_at": "2025-10-02T02:55:04.071398Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -436,7 +436,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:15.981551476Z",
+ "created_at": "2025-10-02T02:55:04.111908Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -454,7 +454,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:16.170823008Z",
+ "created_at": "2025-10-02T02:55:04.153461Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -472,7 +472,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:16.361099362Z",
+ "created_at": "2025-10-02T02:55:04.195941Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -490,7 +490,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:16.554187248Z",
+ "created_at": "2025-10-02T02:55:04.236433Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -508,7 +508,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:16.746364193Z",
+ "created_at": "2025-10-02T02:55:04.27718Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -526,7 +526,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:16.937784556Z",
+ "created_at": "2025-10-02T02:55:04.317743Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -544,7 +544,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:17.130739694Z",
+ "created_at": "2025-10-02T02:55:04.358602Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -562,7 +562,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:17.324485154Z",
+ "created_at": "2025-10-02T02:55:04.399212Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -580,7 +580,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:17.513221988Z",
+ "created_at": "2025-10-02T02:55:04.439733Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -598,7 +598,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:17.704588587Z",
+ "created_at": "2025-10-02T02:55:04.480639Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -616,7 +616,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:17.89491876Z",
+ "created_at": "2025-10-02T02:55:04.521251Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -634,7 +634,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:18.085415685Z",
+ "created_at": "2025-10-02T02:55:04.56195Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -652,7 +652,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:18.291123534Z",
+ "created_at": "2025-10-02T02:55:04.60257Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -670,7 +670,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:18.481091772Z",
+ "created_at": "2025-10-02T02:55:04.643071Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -688,7 +688,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:18.669330853Z",
+ "created_at": "2025-10-02T02:55:04.684195Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -706,7 +706,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:18.862203802Z",
+ "created_at": "2025-10-02T02:55:04.725008Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -724,7 +724,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:19.050586441Z",
+ "created_at": "2025-10-02T02:55:04.766299Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -742,7 +742,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:19.243400941Z",
+ "created_at": "2025-10-02T02:55:04.807076Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -760,7 +760,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:19.438492404Z",
+ "created_at": "2025-10-02T02:55:04.848963Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -778,7 +778,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:19.625091169Z",
+ "created_at": "2025-10-02T02:55:04.889928Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -796,7 +796,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:19.817882725Z",
+ "created_at": "2025-10-02T02:55:04.934326Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -814,7 +814,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:20.006228518Z",
+ "created_at": "2025-10-02T02:55:04.977276Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -832,7 +832,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:20.195451511Z",
+ "created_at": "2025-10-02T02:55:05.020601Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -850,7 +850,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:20.38583856Z",
+ "created_at": "2025-10-02T02:55:05.063018Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -868,7 +868,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:20.574736342Z",
+ "created_at": "2025-10-02T02:55:05.104224Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -886,7 +886,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:20.770260046Z",
+ "created_at": "2025-10-02T02:55:05.144777Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -904,7 +904,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:20.961391185Z",
+ "created_at": "2025-10-02T02:55:05.184974Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -922,7 +922,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:21.15136915Z",
+ "created_at": "2025-10-02T02:55:05.225424Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -940,7 +940,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:21.34012064Z",
+ "created_at": "2025-10-02T02:55:05.2659Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -958,7 +958,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:21.530394237Z",
+ "created_at": "2025-10-02T02:55:05.306482Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -976,7 +976,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:21.721043618Z",
+ "created_at": "2025-10-02T02:55:05.346838Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -994,7 +994,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:21.911611623Z",
+ "created_at": "2025-10-02T02:55:05.387059Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1012,7 +1012,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:22.100940877Z",
+ "created_at": "2025-10-02T02:55:05.427541Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1030,7 +1030,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:22.289910353Z",
+ "created_at": "2025-10-02T02:55:05.467788Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1048,7 +1048,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:22.476827205Z",
+ "created_at": "2025-10-02T02:55:05.508102Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1066,7 +1066,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:22.663529325Z",
+ "created_at": "2025-10-02T02:55:05.548521Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1084,7 +1084,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:22.851128482Z",
+ "created_at": "2025-10-02T02:55:05.588742Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1102,7 +1102,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:23.042424694Z",
+ "created_at": "2025-10-02T02:55:05.629266Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1120,7 +1120,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:23.234415016Z",
+ "created_at": "2025-10-02T02:55:05.674214Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1138,7 +1138,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:23.422767727Z",
+ "created_at": "2025-10-02T02:55:05.71804Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1156,7 +1156,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:23.611953916Z",
+ "created_at": "2025-10-02T02:55:05.761666Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1174,7 +1174,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:23.802138602Z",
+ "created_at": "2025-10-02T02:55:05.80432Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1192,7 +1192,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:23.993446989Z",
+ "created_at": "2025-10-02T02:55:05.846217Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1210,7 +1210,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:24.186705934Z",
+ "created_at": "2025-10-02T02:55:05.88931Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1228,7 +1228,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:24.39236955Z",
+ "created_at": "2025-10-02T02:55:05.93282Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1246,7 +1246,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:24.579916625Z",
+ "created_at": "2025-10-02T02:55:05.976513Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1264,7 +1264,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:24.768821839Z",
+ "created_at": "2025-10-02T02:55:06.020886Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1282,7 +1282,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:24.957792215Z",
+ "created_at": "2025-10-02T02:55:06.063597Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1300,7 +1300,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:25.147895529Z",
+ "created_at": "2025-10-02T02:55:06.106054Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1318,7 +1318,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:25.337348777Z",
+ "created_at": "2025-10-02T02:55:06.148232Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1336,7 +1336,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:25.528043056Z",
+ "created_at": "2025-10-02T02:55:06.190334Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1354,7 +1354,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:25.720598024Z",
+ "created_at": "2025-10-02T02:55:06.231933Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1372,7 +1372,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:25.908813849Z",
+ "created_at": "2025-10-02T02:55:06.27373Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1390,7 +1390,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:26.102538985Z",
+ "created_at": "2025-10-02T02:55:06.315435Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1408,7 +1408,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:26.296587284Z",
+ "created_at": "2025-10-02T02:55:06.35848Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1426,7 +1426,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:26.48997969Z",
+ "created_at": "2025-10-02T02:55:06.400959Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1444,7 +1444,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:26.68461717Z",
+ "created_at": "2025-10-02T02:55:06.441214Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1462,7 +1462,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:26.877976002Z",
+ "created_at": "2025-10-02T02:55:06.481409Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1480,7 +1480,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:27.071304424Z",
+ "created_at": "2025-10-02T02:55:06.522518Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1498,7 +1498,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:27.267083009Z",
+ "created_at": "2025-10-02T02:55:06.564666Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1516,7 +1516,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:27.458752902Z",
+ "created_at": "2025-10-02T02:55:06.605895Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1534,7 +1534,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:27.651757232Z",
+ "created_at": "2025-10-02T02:55:06.646978Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1552,7 +1552,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:27.84093711Z",
+ "created_at": "2025-10-02T02:55:06.68904Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1570,7 +1570,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:28.031166547Z",
+ "created_at": "2025-10-02T02:55:06.730173Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1588,7 +1588,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:28.222014814Z",
+ "created_at": "2025-10-02T02:55:06.772861Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1606,7 +1606,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:28.412024854Z",
+ "created_at": "2025-10-02T02:55:06.816599Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1624,7 +1624,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:28.603242201Z",
+ "created_at": "2025-10-02T02:55:06.859503Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1642,7 +1642,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:28.793015428Z",
+ "created_at": "2025-10-02T02:55:06.901146Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1660,7 +1660,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:28.98105341Z",
+ "created_at": "2025-10-02T02:55:06.943698Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1678,7 +1678,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:29.171562052Z",
+ "created_at": "2025-10-02T02:55:06.985619Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1696,7 +1696,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:29.359960218Z",
+ "created_at": "2025-10-02T02:55:07.027092Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1714,7 +1714,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:29.547663965Z",
+ "created_at": "2025-10-02T02:55:07.068654Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1732,7 +1732,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:29.737967784Z",
+ "created_at": "2025-10-02T02:55:07.109785Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1750,7 +1750,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:29.926196503Z",
+ "created_at": "2025-10-02T02:55:07.151491Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1768,7 +1768,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:30.117904197Z",
+ "created_at": "2025-10-02T02:55:07.192762Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1786,7 +1786,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:30.309146475Z",
+ "created_at": "2025-10-02T02:55:07.2337Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -1804,15 +1804,15 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:35:30.497677975Z",
+ "created_at": "2025-10-02T02:55:07.276074Z",
"done": true,
"done_reason": "stop",
- "total_duration": 21228194411,
- "load_duration": 46730034,
+ "total_duration": 4260353875,
+ "load_duration": 95584041,
"prompt_eval_count": 36,
- "prompt_eval_duration": 2125755306,
+ "prompt_eval_duration": 62641958,
"eval_count": 100,
- "eval_duration": 19055134812,
+ "eval_duration": 4101499250,
"response": "",
"thinking": null,
"context": null
diff --git a/tests/integration/recordings/responses/325a72db5755.json b/tests/integration/recordings/responses/325a72db5755.json
index ca3eea2f3..1341efc51 100644
--- a/tests/integration/recordings/responses/325a72db5755.json
+++ b/tests/integration/recordings/responses/325a72db5755.json
@@ -21,7 +21,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -36,7 +36,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -47,7 +47,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -62,7 +62,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -73,7 +73,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -88,7 +88,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -99,7 +99,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -114,7 +114,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -125,7 +125,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -140,7 +140,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -151,7 +151,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -166,7 +166,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -177,7 +177,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -192,7 +192,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -203,7 +203,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -218,7 +218,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -229,7 +229,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -244,7 +244,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -255,7 +255,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -270,7 +270,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -281,7 +281,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -296,7 +296,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -307,7 +307,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -322,7 +322,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -333,7 +333,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -348,7 +348,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -359,7 +359,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -374,7 +374,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -385,7 +385,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -400,7 +400,7 @@
"logprobs": null
}
],
- "created": 1756921364,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -411,7 +411,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -426,7 +426,7 @@
"logprobs": null
}
],
- "created": 1756921365,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -437,7 +437,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -452,7 +452,7 @@
"logprobs": null
}
],
- "created": 1756921365,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -463,7 +463,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -478,7 +478,7 @@
"logprobs": null
}
],
- "created": 1756921365,
+ "created": 1759437883,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -489,7 +489,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -504,7 +504,7 @@
"logprobs": null
}
],
- "created": 1756921365,
+ "created": 1759437884,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -515,683 +515,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " It",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": "'s",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " a",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " federally",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " owned",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " district",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " that",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " serves",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " as",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " the",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " seat",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " of",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " the",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " federal",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " government",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": ",",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " housing",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " many",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " national",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " landmarks",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": ",",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921365,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " institutions",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921366,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": ",",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921366,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " and",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921366,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": " offices",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921366,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
- "choices": [
- {
- "delta": {
- "content": ".",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921366,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-735",
"choices": [
{
"delta": {
@@ -1206,7 +530,7 @@
"logprobs": null
}
],
- "created": 1756921366,
+ "created": 1759437884,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/3387f56ccac9.json b/tests/integration/recordings/responses/3387f56ccac9.json
index 9b8ba7d4e..14891a91b 100644
--- a/tests/integration/recordings/responses/3387f56ccac9.json
+++ b/tests/integration/recordings/responses/3387f56ccac9.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-200",
+ "id": "chatcmpl-141",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759368386,
+ "created": 1759441670,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/35a5f1de4bd7.json b/tests/integration/recordings/responses/35a5f1de4bd7.json
new file mode 100644
index 000000000..960cb2d4e
--- /dev/null
+++ b/tests/integration/recordings/responses/35a5f1de4bd7.json
@@ -0,0 +1,809 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_tipirynt",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_tipirynt",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " was",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " unable",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " find",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429355,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " liquid",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " Celsius",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " could",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " not",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " be",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " located",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " my",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": " database",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-932",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429356,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/36badd90238f.json b/tests/integration/recordings/responses/36badd90238f.json
new file mode 100644
index 000000000..c3760805b
--- /dev/null
+++ b/tests/integration/recordings/responses/36badd90238f.json
@@ -0,0 +1,366 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.266524Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.307779Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "get",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.349588Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_bo",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.392007Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "iling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.435225Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.47687Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.518854Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "liquid",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.560093Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.601376Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "='",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.642613Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.686473Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.728965Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.770498Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "',",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.812614Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " cel",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.854407Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ci",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.896933Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "us",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.938059Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=True",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:11.980332Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.021812Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 900445208,
+ "load_duration": 78206917,
+ "prompt_eval_count": 364,
+ "prompt_eval_duration": 65645917,
+ "eval_count": 19,
+ "eval_duration": 755986375,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/37706c1729ba.json b/tests/integration/recordings/responses/37706c1729ba.json
index 74caaadf1..7bb9784f5 100644
--- a/tests/integration/recordings/responses/37706c1729ba.json
+++ b/tests/integration/recordings/responses/37706c1729ba.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-923",
+ "id": "chatcmpl-905",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282470,
+ "created": 1759441160,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/378412143edb.json b/tests/integration/recordings/responses/378412143edb.json
new file mode 100644
index 000000000..bbd3517d5
--- /dev/null
+++ b/tests/integration/recordings/responses/378412143edb.json
@@ -0,0 +1,419 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_ay3w6qne",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_ay3w6qne",
+ "content": "-100"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428020,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428020,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428020,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428020,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": " Poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": "100",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": "\u00b0C",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-250",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428021,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/38ea441b5f83.json b/tests/integration/recordings/responses/38ea441b5f83.json
index 79886b389..03229846b 100644
--- a/tests/integration/recordings/responses/38ea441b5f83.json
+++ b/tests/integration/recordings/responses/38ea441b5f83.json
@@ -46,7 +46,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-761",
+ "id": "chatcmpl-236",
"choices": [
{
"finish_reason": "tool_calls",
@@ -61,7 +61,7 @@
"function_call": null,
"tool_calls": [
{
- "id": "call_cj8ownwc",
+ "id": "call_u4ydewqv",
"function": {
"arguments": "{\"location\":\"San Francisco, CA\"}",
"name": "get_weather"
@@ -73,15 +73,15 @@
}
}
],
- "created": 1758975113,
+ "created": 1759376610,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 18,
+ "completion_tokens": 20,
"prompt_tokens": 185,
- "total_tokens": 203,
+ "total_tokens": 205,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/3a4fb206e68a.json b/tests/integration/recordings/responses/3a4fb206e68a.json
new file mode 100644
index 000000000..6b180d892
--- /dev/null
+++ b/tests/integration/recordings/responses/3a4fb206e68a.json
@@ -0,0 +1,986 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant Always respond with tool calls no matter what. "
+ },
+ {
+ "role": "user",
+ "content": "Get the boiling point of polyjuice with a tool call."
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_l2ovyvtm",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_l2ovyvtm",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " apologize",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " for",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " error",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " Here",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " revised",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429343,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " tool",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " call",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": ":\n\n",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "{\"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "name",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "get",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "_bo",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "iling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "_point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "\",",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "parameters",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " {\"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "liquid",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "_name",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "\"}}",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-329",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429344,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/3a81146f2afa.json b/tests/integration/recordings/responses/3a81146f2afa.json
index e2d2d52d6..237cc27fe 100644
--- a/tests/integration/recordings/responses/3a81146f2afa.json
+++ b/tests/integration/recordings/responses/3a81146f2afa.json
@@ -18,7 +18,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -27,7 +27,7 @@
"text": "Blue"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -37,7 +37,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -46,7 +46,7 @@
"text": ".\n\n"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -56,7 +56,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -65,7 +65,7 @@
"text": "The"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -75,16 +75,16 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
"index": 0,
"logprobs": null,
- "text": " completed"
+ "text": " classic"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -94,16 +94,16 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
"index": 0,
"logprobs": null,
- "text": " sentence"
+ "text": " rh"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -113,7 +113,83 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": "ym"
+ }
+ ],
+ "created": 1759437793,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": "ing"
+ }
+ ],
+ "created": 1759437793,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " couple"
+ }
+ ],
+ "created": 1759437793,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": "t"
+ }
+ ],
+ "created": 1759437793,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -122,7 +198,7 @@
"text": " is"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -132,7 +208,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -141,7 +217,7 @@
"text": " a"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -151,7 +227,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -160,7 +236,7 @@
"text": " well"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -170,7 +246,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -179,7 +255,7 @@
"text": "-known"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -189,7 +265,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -198,7 +274,7 @@
"text": " phrase"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -208,16 +284,16 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
"index": 0,
"logprobs": null,
- "text": " from"
+ "text": " that"
}
],
- "created": 1757857132,
+ "created": 1759437793,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -227,16 +303,16 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
"index": 0,
"logprobs": null,
- "text": " a"
+ "text": " completes"
}
],
- "created": 1757857132,
+ "created": 1759437794,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -246,653 +322,7 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " traditional"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " English"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " poem"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": ":\n\n"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": "\""
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": "R"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": "oses"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " are"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " red"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": ","
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " v"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": "io"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": "lets"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " are"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " blue"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": ",\n"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": "Sugar"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " is"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " sweet"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": ","
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " and"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " so"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " are"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " you"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": ".\""
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " However"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": ","
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " in"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " many"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " variations"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " of"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " this"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": " poem"
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
- "choices": [
- {
- "finish_reason": null,
- "index": 0,
- "logprobs": null,
- "text": ","
- }
- ],
- "created": 1757857132,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "text_completion",
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.completion.Completion",
- "__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -901,7 +331,7 @@
"text": " the"
}
],
- "created": 1757857132,
+ "created": 1759437794,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -911,16 +341,16 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
"index": 0,
"logprobs": null,
- "text": " line"
+ "text": " poem"
}
],
- "created": 1757857132,
+ "created": 1759437794,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -930,7 +360,64 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " with"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " the"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " word"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
@@ -939,7 +426,7 @@
"text": " \""
}
],
- "created": 1757857132,
+ "created": 1759437794,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -949,16 +436,16 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": null,
"index": 0,
"logprobs": null,
- "text": "vio"
+ "text": "blue"
}
],
- "created": 1757857132,
+ "created": 1759437794,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
@@ -968,7 +455,520 @@
{
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-439",
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": "\","
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " creating"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " a"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " rhyme"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " scheme"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " of"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " AABB"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": "."
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " This"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " poetic"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " device"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " has"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " been"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " used"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " in"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " various"
+ }
+ ],
+ "created": 1759437794,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " forms"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " and"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " iterations"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " throughout"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " history"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": ","
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " often"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " to"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " convey"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " love"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
+ "choices": [
+ {
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null,
+ "text": " and"
+ }
+ ],
+ "created": 1759437795,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "text_completion",
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.completion.Completion",
+ "__data__": {
+ "id": "cmpl-676",
"choices": [
{
"finish_reason": "length",
@@ -977,7 +977,7 @@
"text": ""
}
],
- "created": 1757857132,
+ "created": 1759437795,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
diff --git a/tests/integration/recordings/responses/3bd4bb58d78a.json b/tests/integration/recordings/responses/3bd4bb58d78a.json
new file mode 100644
index 000000000..ba44a8e3b
--- /dev/null
+++ b/tests/integration/recordings/responses/3bd4bb58d78a.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "required",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "str",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "bool",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-288",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_rp5mke0x",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425751,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-288",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425751,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/3ca695048bee.json b/tests/integration/recordings/responses/3ca695048bee.json
index b307b2f98..45ca41d28 100644
--- a/tests/integration/recordings/responses/3ca695048bee.json
+++ b/tests/integration/recordings/responses/3ca695048bee.json
@@ -39,32 +39,22 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-3",
+ "id": "chatcmpl-828",
"choices": [
{
"delta": {
- "content": "",
+ "content": "{\"name\":\"get_water\", \"parameters\": {\"city\":\"Tokyo\"}}",
"function_call": null,
"refusal": null,
"role": "assistant",
- "tool_calls": [
- {
- "index": 0,
- "id": "call_3kigugt3",
- "function": {
- "arguments": "{\"city\":\"Tokyo\"}",
- "name": "get_weather"
- },
- "type": "function"
- }
- ]
+ "tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
- "created": 1756921361,
+ "created": 1759437882,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -75,7 +65,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-3",
+ "id": "chatcmpl-828",
"choices": [
{
"delta": {
@@ -85,12 +75,12 @@
"role": "assistant",
"tool_calls": null
},
- "finish_reason": "tool_calls",
+ "finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
- "created": 1756921361,
+ "created": 1759437882,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/3f5871e0805d.json b/tests/integration/recordings/responses/3f5871e0805d.json
new file mode 100644
index 000000000..4c79ce460
--- /dev/null
+++ b/tests/integration/recordings/responses/3f5871e0805d.json
@@ -0,0 +1,85 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Process this data"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "process_data",
+ "description": "Process structured data",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "data": {
+ "$ref": "#/$defs/DataObject"
+ }
+ },
+ "$defs": {
+ "DataObject": {
+ "type": "object",
+ "properties": {
+ "values": {
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-798",
+ "choices": [
+ {
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "{\"name\":\"process_data\",\"parameters\":{\"data\":[{\"values\":[2,3]}]\"}}",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": null
+ }
+ }
+ ],
+ "created": 1759376608,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 20,
+ "prompt_tokens": 176,
+ "total_tokens": 196,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/3fc7de7e822b.json b/tests/integration/recordings/responses/3fc7de7e822b.json
new file mode 100644
index 000000000..bf97c4158
--- /dev/null
+++ b/tests/integration/recordings/responses/3fc7de7e822b.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "str",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "bool",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-54",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_xbvaryhe",
+ "function": {
+ "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425232,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-54",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425232,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/41ac2702de6c.json b/tests/integration/recordings/responses/41ac2702de6c.json
index 987f16ae1..92c1fc0cd 100644
--- a/tests/integration/recordings/responses/41ac2702de6c.json
+++ b/tests/integration/recordings/responses/41ac2702de6c.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-402",
+ "id": "chatcmpl-682",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245123,
+ "created": 1759437798,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/4283d7199d9b.json b/tests/integration/recordings/responses/4283d7199d9b.json
new file mode 100644
index 000000000..c09104a8c
--- /dev/null
+++ b/tests/integration/recordings/responses/4283d7199d9b.json
@@ -0,0 +1,366 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.080011Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.126544Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "get",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.169848Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_bo",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.21147Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "iling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.254674Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.29727Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.338937Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "liquid",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.380865Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.422627Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "='",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.463935Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.505674Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.547072Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.588461Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "',",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.629627Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " cel",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.67101Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ci",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.713398Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "us",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.757208Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=True",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.800572Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:54.843458Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 1585956083,
+ "load_duration": 162121750,
+ "prompt_eval_count": 361,
+ "prompt_eval_duration": 657951625,
+ "eval_count": 19,
+ "eval_duration": 765105333,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/4a32ce3da3ce.json b/tests/integration/recordings/responses/4a32ce3da3ce.json
new file mode 100644
index 000000000..565edee20
--- /dev/null
+++ b/tests/integration/recordings/responses/4a32ce3da3ce.json
@@ -0,0 +1,414 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant Always respond with tool calls no matter what. "
+ },
+ {
+ "role": "user",
+ "content": "Get the boiling point of polyjuice with a tool call."
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_v7gdtg8p",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_v7gdtg8p",
+ "content": "-100"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": " Poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": "100",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": "\u00b0C",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441160,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-67",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441161,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/4c651211b0e0.json b/tests/integration/recordings/responses/4c651211b0e0.json
index dbed465cf..94ba43163 100644
--- a/tests/integration/recordings/responses/4c651211b0e0.json
+++ b/tests/integration/recordings/responses/4c651211b0e0.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-796",
+ "id": "chatcmpl-216",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759368388,
+ "created": 1759441674,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/4ebcaf6c2aee.json b/tests/integration/recordings/responses/4ebcaf6c2aee.json
index 41dc9ab1a..f57994797 100644
--- a/tests/integration/recordings/responses/4ebcaf6c2aee.json
+++ b/tests/integration/recordings/responses/4ebcaf6c2aee.json
@@ -19,22 +19,390 @@
"data": [
{
"embedding": [
- 0.253706,
- 0.016367152,
- -0.29664654,
- 0.31654558,
- -0.18624601,
- 0.07602756,
- -0.031531323,
- 0.2986085,
- -0.49672848,
- -0.36617878,
- 0.25328273,
- -0.33349335,
- 0.0060151755,
- 0.14081024,
- -0.13757885,
- -0.14679416
+ 0.04635219,
+ 0.002988263,
+ -0.054220885,
+ 0.057812735,
+ -0.0340614,
+ 0.013923248,
+ -0.005755826,
+ 0.054555666,
+ -0.09073176,
+ -0.066910096,
+ 0.046287432,
+ -0.060912322,
+ 0.0010950539,
+ 0.025724398,
+ -0.025169374,
+ -0.026821515,
+ -0.030190151,
+ 0.0019341545,
+ -0.0754819,
+ 0.057380512,
+ 0.020332545,
+ -0.005591279,
+ -0.0022273492,
+ 0.012063173,
+ -0.011033521,
+ -0.03300947,
+ 0.05462081,
+ 0.014426073,
+ 0.024025004,
+ 0.004224287,
+ 0.09837723,
+ 0.08385713,
+ -0.049175426,
+ 0.03877149,
+ 0.08748876,
+ -0.0223024,
+ 0.006552746,
+ -0.0070359865,
+ 0.017893821,
+ 0.015465863,
+ 0.05007282,
+ -0.019349905,
+ 0.064887345,
+ 0.03184605,
+ 0.0034936152,
+ 0.02317752,
+ -0.06297051,
+ 0.044468515,
+ -0.022246253,
+ -0.017976552,
+ 0.040390052,
+ -0.0020998395,
+ -0.05173264,
+ 0.014722753,
+ 0.01640469,
+ -0.06438627,
+ -0.043313596,
+ -0.040564552,
+ 0.044412937,
+ -0.0031199565,
+ -0.007237415,
+ -0.05158015,
+ 0.059660934,
+ -0.014839656,
+ 0.012902056,
+ 0.028181136,
+ -0.019578207,
+ -0.0664231,
+ -0.06333673,
+ 0.028995825,
+ -0.114707075,
+ 0.041575413,
+ -0.022128351,
+ 0.01979776,
+ 0.0630018,
+ 0.011822141,
+ -0.06492722,
+ -0.066328146,
+ 0.021114407,
+ -0.020638306,
+ -0.009599678,
+ 0.013701863,
+ -0.060742326,
+ 0.005395315,
+ 0.026589092,
+ 0.11719033,
+ 0.067120634,
+ 0.008300158,
+ 0.036319703,
+ 0.00772981,
+ 0.071582936,
+ 0.019818509,
+ -0.15945566,
+ 0.047943458,
+ 0.00031571978,
+ -0.04666597,
+ 0.007148715,
+ -0.08839544,
+ 0.038042437,
+ 0.06620088,
+ 0.034336157,
+ -0.035366412,
+ 0.041598067,
+ 0.073756054,
+ -0.018818064,
+ -0.017260034,
+ 0.058635473,
+ -0.01371376,
+ 0.048319146,
+ -0.023727186,
+ 0.024134034,
+ 0.015763162,
+ 0.06681245,
+ 0.01748244,
+ 0.0825409,
+ -0.044568237,
+ 0.0015441044,
+ -0.011225885,
+ 0.0153481,
+ -0.061364066,
+ 0.05792184,
+ 0.044216745,
+ -0.047036964,
+ -0.02634555,
+ -0.033504363,
+ 0.06713578,
+ 0.030866034,
+ 2.024336e-34,
+ -0.03532978,
+ 0.021929236,
+ 0.030160688,
+ 0.09271786,
+ -0.010355268,
+ 0.07196569,
+ 0.052604284,
+ 0.085753724,
+ 0.094942175,
+ 0.053786535,
+ -0.08900509,
+ -0.024382822,
+ -0.008744401,
+ -0.03167582,
+ 0.01025236,
+ 0.1818434,
+ -0.0022662894,
+ 0.118558116,
+ -0.072208576,
+ -0.005867667,
+ 0.0746222,
+ -0.024001855,
+ -0.013938801,
+ -0.030681474,
+ -0.029207803,
+ -0.117624186,
+ -0.046466038,
+ -0.002622228,
+ -0.0902171,
+ -0.038626853,
+ -0.037497964,
+ -0.02418436,
+ -0.069297835,
+ 0.06424038,
+ 0.0045628003,
+ -0.0041498984,
+ -0.01649947,
+ 0.051125433,
+ -0.0058985935,
+ -0.0122523345,
+ -0.047424458,
+ -0.007806876,
+ 0.07906618,
+ 0.03244041,
+ -0.044682544,
+ -0.022625683,
+ 0.028852794,
+ -0.050480433,
+ 0.043801326,
+ -0.023512814,
+ -0.029832385,
+ 0.031089257,
+ 0.07129686,
+ -0.089649536,
+ 0.011963804,
+ -0.018448317,
+ 0.019637493,
+ 0.020081993,
+ 0.0012980831,
+ 0.093201645,
+ -0.064436235,
+ -0.040581323,
+ -0.01193043,
+ 0.043884862,
+ -0.010675756,
+ -0.030739127,
+ 0.005605308,
+ -0.110498495,
+ 0.044510514,
+ 0.037110664,
+ 0.04116233,
+ -0.039460793,
+ -0.04470639,
+ -0.027589805,
+ -0.02073358,
+ -0.067221105,
+ 0.050390884,
+ 0.031397663,
+ -0.008031462,
+ -0.009285899,
+ 0.0013141648,
+ -0.017254544,
+ 0.010367782,
+ -0.05940024,
+ -0.018042587,
+ -0.15487815,
+ 0.0069424273,
+ -0.05208202,
+ 0.0014201442,
+ -0.13956298,
+ -0.040203292,
+ 0.027910054,
+ -0.064872995,
+ -0.016270144,
+ 0.07052549,
+ 5.3188943e-34,
+ 0.012666737,
+ 0.016728623,
+ -0.013163009,
+ 0.06391275,
+ -0.043404065,
+ 0.015435096,
+ 0.03720438,
+ 0.05997576,
+ -0.07789181,
+ -0.0408386,
+ 0.024137221,
+ -0.019834999,
+ -0.034739267,
+ 0.00042199617,
+ 0.048484907,
+ 0.08716056,
+ -0.101133205,
+ -0.07535088,
+ -0.03912376,
+ -0.031597532,
+ -0.052266575,
+ 0.022085808,
+ -0.011040282,
+ 0.005077135,
+ -0.088432744,
+ -0.010477913,
+ 0.047780182,
+ -0.073345095,
+ 0.014382301,
+ 0.038075384,
+ 0.02176859,
+ -0.029071847,
+ -0.036925532,
+ 0.14317243,
+ 0.020646103,
+ -0.08367964,
+ 0.111576855,
+ -0.009943396,
+ 0.023071144,
+ 0.0926832,
+ 0.011242715,
+ 0.068017475,
+ -0.007714686,
+ 0.03060742,
+ -0.011360289,
+ 0.109015204,
+ 0.12930514,
+ -0.07566831,
+ 0.09001269,
+ -0.0090979,
+ 0.0148039665,
+ 0.048663232,
+ 0.08894293,
+ 0.038565516,
+ 0.005821986,
+ 0.016084671,
+ -0.106283545,
+ -0.033372246,
+ 0.05440088,
+ -0.005663873,
+ 0.0011572369,
+ -0.024969472,
+ 0.043092247,
+ -0.009314855,
+ -0.11836073,
+ -0.027310666,
+ 0.009811885,
+ -0.0052975323,
+ -0.044883158,
+ 0.066436425,
+ -0.06750139,
+ -0.02696421,
+ 0.01402391,
+ -0.04950559,
+ -0.084093384,
+ -0.07380851,
+ 0.04709705,
+ 4.9404687e-05,
+ 0.01672617,
+ 0.01849747,
+ 0.027683195,
+ 0.0047972985,
+ 0.0017495222,
+ 0.07066204,
+ -0.022430636,
+ 0.06875498,
+ 0.093927115,
+ 0.11101308,
+ -0.015589739,
+ 0.021178465,
+ 0.033638563,
+ 0.034676168,
+ -0.026882911,
+ -0.010514364,
+ 0.0073013064,
+ -1.2070348e-08,
+ -0.10034882,
+ -0.028641108,
+ -0.061462097,
+ -0.009792086,
+ -0.081652306,
+ -0.011814046,
+ 0.002039501,
+ 0.010384326,
+ 0.01639641,
+ 0.09542911,
+ 0.012538498,
+ -0.03542602,
+ 0.018125113,
+ 0.062750235,
+ 0.0007333235,
+ -0.13612862,
+ -0.049830034,
+ 0.021177148,
+ 0.006589976,
+ 0.007859552,
+ -0.03270378,
+ 0.024738451,
+ -0.02542262,
+ -0.0033008803,
+ 0.030640591,
+ -0.032442387,
+ 0.04598555,
+ 0.03903257,
+ 0.035755396,
+ 0.01686084,
+ 0.13498692,
+ 0.028296864,
+ -0.0035224769,
+ -0.036735818,
+ -0.046355885,
+ 0.057701495,
+ 0.008000554,
+ 0.047822826,
+ 0.04911064,
+ 0.035214324,
+ -0.09817153,
+ 0.0050856513,
+ -0.018094635,
+ -0.04385158,
+ 0.06649695,
+ -0.037648164,
+ -0.006218895,
+ -0.037976924,
+ -0.0036204353,
+ -0.03149386,
+ 0.031777944,
+ -0.011333557,
+ 0.009081317,
+ 0.022486951,
+ 0.032106593,
+ 0.023041077,
+ -0.06739943,
+ 0.06294171,
+ -0.057333894,
+ -0.041295,
+ 0.060841344,
+ 0.03247397,
+ -0.05132725,
+ -0.04992364
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/4f00cf740aba.json b/tests/integration/recordings/responses/4f00cf740aba.json
index 85a5e18fb..fb05db569 100644
--- a/tests/integration/recordings/responses/4f00cf740aba.json
+++ b/tests/integration/recordings/responses/4f00cf740aba.json
@@ -18,390 +18,390 @@
"data": [
{
"embedding": [
- -0.038157914,
- 0.03290493,
- -0.0055371798,
- 0.014353213,
- -0.040209096,
- -0.11667767,
- 0.03170551,
- 0.0019347348,
- -0.04254092,
- 0.029190615,
- 0.042559944,
- 0.032130145,
- 0.02983921,
- 0.010979105,
- -0.053759154,
- -0.05030495,
- -0.023470305,
- 0.010730486,
- -0.1377361,
- 0.0039985846,
- 0.029267203,
- 0.066698566,
- -0.015405643,
- 0.04843479,
- -0.0881545,
- -0.012694429,
- 0.041265942,
- 0.04089442,
- -0.05000745,
- -0.05805947,
- 0.048748765,
- 0.06891688,
- 0.058812816,
- 0.008785837,
- -0.016080279,
- 0.08517403,
- -0.07814158,
- -0.077435054,
- 0.020808736,
- 0.016186161,
- 0.032549612,
- -0.05344129,
- -0.062166847,
- -0.0242584,
- 0.007393759,
- 0.024064584,
- 0.0064619263,
- 0.051204458,
- 0.072843835,
- 0.034658417,
- -0.05477693,
- -0.05941287,
- -0.007262739,
- 0.020149412,
- 0.035835978,
- 0.0056162532,
- 0.010803632,
- -0.052724347,
- 0.010110615,
- -0.0087345,
- -0.06285489,
- 0.038390912,
- -0.013975588,
- 0.0734118,
- 0.090072334,
- -0.07995426,
- -0.016420014,
- 0.044813525,
- -0.06888206,
- -0.033037275,
- -0.015467736,
- 0.01130628,
- 0.036483694,
- 0.0663459,
- -0.054344203,
- 0.008723171,
- 0.012078509,
- -0.038129516,
- 0.006938081,
- 0.051155496,
- 0.07745829,
- -0.122897476,
- 0.01635594,
- 0.04956378,
- 0.031677794,
- -0.03963372,
- 0.0016560612,
- 0.0095810415,
- -0.032620687,
- -0.03396473,
- -0.13327733,
- 0.0072318353,
- -0.010225149,
- 0.038535405,
- -0.09343492,
- -0.04173385,
- 0.06996305,
- -0.026312327,
- -0.14973918,
- 0.13443227,
- 0.03750676,
- 0.052842483,
- 0.045053005,
- 0.018721534,
- 0.05443072,
- 0.017290117,
- -0.03255681,
- 0.046160772,
- -0.046711024,
- -0.030576464,
- -0.018258592,
- -0.048711784,
- 0.033041865,
- -0.003856249,
- 0.05003307,
- -0.05821012,
- -0.00994153,
- 0.0106995255,
- -0.04008794,
- -0.0015539092,
- 0.060838487,
- -0.04559896,
- 0.04924722,
- 0.026119638,
- 0.019796783,
- -0.0016312932,
- 0.05955464,
- -6.527786e-33,
- 0.063555494,
- 0.003072545,
- 0.0290068,
- 0.17338625,
- 0.0029474646,
- 0.027745575,
- -0.095103905,
- -0.031165987,
- 0.026719859,
- -0.010799976,
- 0.023851028,
- 0.02375357,
- -0.031152952,
- 0.049497593,
- -0.025005657,
- 0.10176666,
- -0.079190366,
- -0.0032479328,
- 0.042849813,
- 0.09489888,
- -0.066508934,
- 0.00632239,
- 0.022188535,
- 0.06996212,
- -0.007491268,
- -0.001777037,
- 0.027047161,
- -0.07536194,
- 0.11401931,
- 0.008564227,
- -0.02371391,
- -0.046974454,
- 0.0144310715,
- 0.019899534,
- -0.0046927175,
- 0.0013119543,
- -0.03432107,
- -0.054212432,
- -0.09418897,
- -0.028963951,
- -0.018907014,
- 0.045735538,
- 0.04757043,
- -0.003132595,
- -0.033231355,
- -0.013520351,
- 0.051010653,
- 0.03111525,
- 0.015257217,
- 0.054166727,
- -0.085080594,
- 0.013355202,
- -0.04763934,
- 0.07099156,
- -0.01309272,
- -0.0023823304,
- 0.050339438,
- -0.041624993,
- -0.014171974,
- 0.032421313,
- 0.005414455,
- 0.09128853,
- 0.0045168963,
- -0.018196244,
- -0.015225792,
- -0.04635148,
- 0.038764603,
- 0.014739169,
- 0.052030377,
- 0.0017809072,
- -0.014930553,
- 0.027100598,
- 0.031190928,
- 0.02379928,
- -0.0045879,
- 0.03622444,
- 0.066800386,
- -0.0018508516,
- 0.021243243,
- -0.0575494,
- 0.019077979,
- 0.031474162,
- -0.018456634,
- -0.04083116,
- 0.10387791,
- 0.011981423,
- -0.014923204,
- -0.10519511,
- -0.012293124,
- -0.00042049217,
- -0.09506704,
- 0.058275525,
- 0.042611193,
- -0.025061507,
- -0.094545335,
- 4.010606e-33,
- 0.13226718,
- 0.0053517097,
- -0.03314567,
- -0.09099676,
- -0.031551942,
- -0.033939674,
- -0.071981214,
- 0.12595285,
- -0.08333936,
- 0.052855294,
- 0.001036374,
- 0.021973396,
- 0.104020424,
- 0.013031712,
- 0.040921222,
- 0.018695012,
- 0.114233166,
- 0.024822846,
- 0.014595918,
- 0.00621894,
- -0.011220824,
- -0.035742316,
- -0.03801776,
- 0.011226576,
- -0.051305167,
- 0.007892534,
- 0.06734842,
- 0.0033567564,
- -0.09286571,
- 0.03701943,
- -0.022331072,
- 0.040051647,
- -0.030764744,
- -0.011390678,
- -0.014426033,
- 0.024999708,
- -0.09751172,
- -0.03538673,
- -0.03757043,
- -0.010174254,
- -0.06396341,
- 0.025548752,
- 0.020661479,
- 0.03752242,
- -0.10438308,
- -0.028266912,
- -0.052153755,
- 0.012830027,
- -0.05125152,
- -0.029009243,
- -0.09633578,
- -0.042322997,
- 0.06716196,
- -0.030903742,
- -0.010314011,
- 0.027343867,
- -0.028119028,
- 0.010296558,
- 0.043072425,
- 0.022286164,
- 0.007943,
- 0.056093868,
- 0.040728126,
- 0.09295372,
- 0.016456816,
- -0.053744446,
- 0.00047035623,
- 0.050744157,
- 0.04246857,
- -0.029237023,
- 0.009294763,
- -0.010624897,
- -0.037202932,
- 0.00220195,
- -0.030278567,
- 0.07457478,
- 0.0026277148,
- -0.017591486,
- 0.0028708735,
- 0.03840644,
- 0.0072204536,
- 0.045653794,
- 0.039947055,
- 0.014161398,
- -0.014247232,
- 0.058465447,
- 0.036360227,
- 0.055268615,
- -0.02004829,
- -0.08043532,
- -0.030213723,
- -0.0148566915,
- 0.022293866,
- 0.011908896,
- -0.06907556,
- -1.8805048e-08,
- -0.078408636,
- 0.046699222,
- -0.023894435,
- 0.06347232,
- 0.02395583,
- 0.0014103559,
- -0.090737104,
- -0.06684135,
- -0.080118775,
- 0.0054891296,
- 0.05368204,
- 0.10478211,
- -0.066875115,
- 0.015525915,
- 0.06710851,
- 0.07083251,
- -0.03199485,
- 0.020825442,
- -0.021920865,
- -0.0072890157,
- -0.01058703,
- 0.004174248,
- 0.033155944,
- -0.07901077,
- 0.038750935,
- -0.07521113,
- -0.015731987,
- 0.005987591,
- 0.0051212795,
- -0.061557226,
- 0.04203319,
- 0.09544439,
- -0.04317485,
- 0.014446859,
- -0.10614051,
- -0.028011814,
- 0.01101727,
- 0.069552526,
- 0.0669063,
- -0.0747214,
- -0.078444764,
- 0.042728573,
- -0.034634914,
- -0.106056124,
- -0.0357495,
- 0.05155015,
- 0.068699375,
- -0.049968246,
- 0.015420614,
- -0.06460179,
- -0.07601102,
- 0.026022797,
- 0.07440251,
- -0.0124161495,
- 0.1332999,
- 0.07480527,
- 0.051343314,
- 0.02094546,
- -0.026808253,
- 0.08892536,
- 0.03996125,
- -0.041000355,
- 0.03187991,
- 0.018108707
+ -0.038168654,
+ 0.032873917,
+ -0.0055947267,
+ 0.014366432,
+ -0.040310103,
+ -0.116643615,
+ 0.031721067,
+ 0.0019260457,
+ -0.04255802,
+ 0.029198613,
+ 0.04252229,
+ 0.032184314,
+ 0.029838374,
+ 0.010959321,
+ -0.053805783,
+ -0.05028783,
+ -0.023449864,
+ 0.0107550435,
+ -0.13774979,
+ 0.0039929547,
+ 0.029302042,
+ 0.066712305,
+ -0.015410682,
+ 0.048422653,
+ -0.08814465,
+ -0.012715775,
+ 0.041334823,
+ 0.040851083,
+ -0.050064698,
+ -0.05804616,
+ 0.048728727,
+ 0.06888658,
+ 0.058795262,
+ 0.008804153,
+ -0.016073612,
+ 0.08514259,
+ -0.078146815,
+ -0.07741974,
+ 0.020842256,
+ 0.016201088,
+ 0.032518543,
+ -0.05346469,
+ -0.062197812,
+ -0.024271712,
+ 0.007416788,
+ 0.024103774,
+ 0.006469804,
+ 0.051166162,
+ 0.07284196,
+ 0.034627657,
+ -0.05475476,
+ -0.059386417,
+ -0.0071934434,
+ 0.020163197,
+ 0.035816014,
+ 0.0055927313,
+ 0.010762318,
+ -0.05274177,
+ 0.010083032,
+ -0.008742163,
+ -0.06284565,
+ 0.038426206,
+ -0.013933317,
+ 0.07342759,
+ 0.09004579,
+ -0.07995627,
+ -0.016420787,
+ 0.044767782,
+ -0.06886435,
+ -0.03303916,
+ -0.015482072,
+ 0.011322529,
+ 0.036461752,
+ 0.066346884,
+ -0.05434455,
+ 0.008740993,
+ 0.012066104,
+ -0.038101126,
+ 0.0069316486,
+ 0.051146947,
+ 0.07740579,
+ -0.122950904,
+ 0.016380342,
+ 0.049568996,
+ 0.031634904,
+ -0.039637603,
+ 0.0016715266,
+ 0.009577405,
+ -0.032646418,
+ -0.033988595,
+ -0.13329837,
+ 0.0072566303,
+ -0.010266605,
+ 0.038557075,
+ -0.09338859,
+ -0.041706774,
+ 0.069941126,
+ -0.026323376,
+ -0.14971305,
+ 0.13445398,
+ 0.03748492,
+ 0.052825302,
+ 0.0450506,
+ 0.018712776,
+ 0.05444322,
+ 0.017282845,
+ -0.032480195,
+ 0.04614526,
+ -0.046711974,
+ -0.030566413,
+ -0.01820007,
+ -0.04869831,
+ 0.033051647,
+ -0.0038142777,
+ 0.04999665,
+ -0.058270358,
+ -0.010011706,
+ 0.010643473,
+ -0.040113144,
+ -0.0015507729,
+ 0.060854245,
+ -0.045562096,
+ 0.049257778,
+ 0.02612153,
+ 0.01981428,
+ -0.001660993,
+ 0.059509434,
+ -6.525298e-33,
+ 0.063519135,
+ 0.0030875143,
+ 0.028961418,
+ 0.1733713,
+ 0.0029763067,
+ 0.027727291,
+ -0.0951315,
+ -0.031186627,
+ 0.026689058,
+ -0.010807322,
+ 0.023850724,
+ 0.023777472,
+ -0.031174092,
+ 0.049501278,
+ -0.025049716,
+ 0.10175924,
+ -0.07919064,
+ -0.0032249284,
+ 0.042915843,
+ 0.09483459,
+ -0.06652636,
+ 0.006303593,
+ 0.02220902,
+ 0.06999181,
+ -0.0074810013,
+ -0.0017734945,
+ 0.027008688,
+ -0.07534615,
+ 0.114036545,
+ 0.008552313,
+ -0.023737878,
+ -0.04694563,
+ 0.014472103,
+ 0.019855395,
+ -0.0046694353,
+ 0.0013555645,
+ -0.034298304,
+ -0.054142635,
+ -0.09419824,
+ -0.028909719,
+ -0.018876282,
+ 0.0457315,
+ 0.04761082,
+ -0.0030971593,
+ -0.033264168,
+ -0.013539523,
+ 0.051041685,
+ 0.031110944,
+ 0.015244497,
+ 0.054158635,
+ -0.08499706,
+ 0.013360703,
+ -0.04759633,
+ 0.07101136,
+ -0.0131114535,
+ -0.0023818254,
+ 0.050331973,
+ -0.041642286,
+ -0.01419894,
+ 0.032463223,
+ 0.0053973934,
+ 0.091275506,
+ 0.0044798073,
+ -0.018260129,
+ -0.015278888,
+ -0.046306957,
+ 0.038750377,
+ 0.014729783,
+ 0.05204642,
+ 0.0017938613,
+ -0.014963651,
+ 0.027101943,
+ 0.031203475,
+ 0.023725478,
+ -0.004601222,
+ 0.03617344,
+ 0.06679477,
+ -0.0018401983,
+ 0.021265576,
+ -0.057589985,
+ 0.019155758,
+ 0.031437635,
+ -0.018444614,
+ -0.04085069,
+ 0.10393101,
+ 0.011960795,
+ -0.014898805,
+ -0.10520497,
+ -0.012302656,
+ -0.00043837292,
+ -0.09508398,
+ 0.058318105,
+ 0.042576887,
+ -0.025066672,
+ -0.094555676,
+ 4.0072287e-33,
+ 0.1322281,
+ 0.0053512393,
+ -0.03312536,
+ -0.09096454,
+ -0.031562407,
+ -0.033949774,
+ -0.07205118,
+ 0.1259232,
+ -0.08333555,
+ 0.052797858,
+ 0.001077506,
+ 0.022004265,
+ 0.10402767,
+ 0.013034249,
+ 0.04091762,
+ 0.018705815,
+ 0.11424037,
+ 0.024799824,
+ 0.014582492,
+ 0.006205516,
+ -0.011202356,
+ -0.035756435,
+ -0.03800272,
+ 0.011251353,
+ -0.0512988,
+ 0.007890417,
+ 0.06736164,
+ 0.0033359542,
+ -0.09285096,
+ 0.03704081,
+ -0.022326592,
+ 0.039967872,
+ -0.030748183,
+ -0.011446819,
+ -0.014453254,
+ 0.02498229,
+ -0.097532175,
+ -0.035378877,
+ -0.03757795,
+ -0.010181498,
+ -0.06392041,
+ 0.025538994,
+ 0.02061816,
+ 0.03757256,
+ -0.1043548,
+ -0.028326731,
+ -0.05209465,
+ 0.0128473425,
+ -0.051238894,
+ -0.029034877,
+ -0.09633617,
+ -0.042309195,
+ 0.067165054,
+ -0.030870603,
+ -0.010357507,
+ 0.027381465,
+ -0.028105576,
+ 0.010302046,
+ 0.04306986,
+ 0.022315372,
+ 0.007954779,
+ 0.056068663,
+ 0.04071972,
+ 0.09293905,
+ 0.016536433,
+ -0.053764775,
+ 0.00047211433,
+ 0.050708972,
+ 0.042510226,
+ -0.029195962,
+ 0.009274875,
+ -0.010647389,
+ -0.037209682,
+ 0.002267011,
+ -0.030304702,
+ 0.0745741,
+ 0.0026207205,
+ -0.017582772,
+ 0.0028797672,
+ 0.038404796,
+ 0.00723137,
+ 0.045613218,
+ 0.03998252,
+ 0.014209623,
+ -0.0142997475,
+ 0.05850862,
+ 0.03630791,
+ 0.055294298,
+ -0.020075988,
+ -0.08041808,
+ -0.030250112,
+ -0.014920701,
+ 0.022349516,
+ 0.011911506,
+ -0.06903851,
+ -1.8806734e-08,
+ -0.078480355,
+ 0.046674173,
+ -0.023920896,
+ 0.0634942,
+ 0.02396477,
+ 0.0014517035,
+ -0.090798445,
+ -0.06684978,
+ -0.0801405,
+ 0.005503192,
+ 0.053675175,
+ 0.104841895,
+ -0.066848256,
+ 0.015522683,
+ 0.067097165,
+ 0.070832625,
+ -0.03197915,
+ 0.020843629,
+ -0.0219202,
+ -0.0073016756,
+ -0.010645817,
+ 0.0040983153,
+ 0.03313765,
+ -0.0790081,
+ 0.03878132,
+ -0.075230986,
+ -0.015732396,
+ 0.0060099233,
+ 0.0051297406,
+ -0.061492138,
+ 0.04202211,
+ 0.09544608,
+ -0.04318599,
+ 0.014424486,
+ -0.10617826,
+ -0.027963417,
+ 0.011034413,
+ 0.069576606,
+ 0.06689785,
+ -0.07479674,
+ -0.07851099,
+ 0.042766396,
+ -0.034639932,
+ -0.10607304,
+ -0.03577663,
+ 0.051540814,
+ 0.068673156,
+ -0.049959548,
+ 0.015460458,
+ -0.064520314,
+ -0.076010585,
+ 0.026035817,
+ 0.07440218,
+ -0.012396022,
+ 0.13329679,
+ 0.074770845,
+ 0.05134284,
+ 0.020977058,
+ -0.026776016,
+ 0.08894323,
+ 0.039937407,
+ -0.04102053,
+ 0.03194075,
+ 0.018113315
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/517505777888.json b/tests/integration/recordings/responses/517505777888.json
index f556ba743..41030cdac 100644
--- a/tests/integration/recordings/responses/517505777888.json
+++ b/tests/integration/recordings/responses/517505777888.json
@@ -18,390 +18,390 @@
"data": [
{
"embedding": [
- 0.019099757,
- -0.020513054,
- -0.07147724,
- -0.02305817,
- -0.06570441,
- -0.0057285326,
- -0.029366547,
- -0.031833924,
- -0.015779832,
- -0.03914512,
- 0.02689602,
- -0.064181775,
- 0.013521624,
- 0.050362427,
- -0.031129995,
- -0.08321027,
- -0.031968866,
- 0.074996136,
- -0.016394366,
- -0.0013953616,
- 0.038505327,
- -0.03440395,
- -0.004868513,
- -0.03093635,
- 0.051909875,
- 0.0091652395,
- 0.0072081746,
- 0.066338904,
- 0.024595087,
- -0.047721148,
- 0.0376462,
- -0.04257363,
- 0.078928985,
- 0.048257265,
- 0.1338569,
- 0.013975464,
- 0.03242688,
- -0.08888101,
- -0.0141724255,
- 0.035531398,
- -0.024727112,
- -0.028608425,
- 0.047635823,
- 0.026230432,
- 0.048455644,
- 0.066589415,
- -0.013602744,
- 0.07181793,
- -0.073052436,
- -0.05030391,
- 0.0039422787,
- 0.033050794,
- -0.047844775,
- -0.017648827,
- 0.010261714,
- -0.105268046,
- -0.010029887,
- 0.014589762,
- -0.05330117,
- 0.0603304,
- -0.10082026,
- 0.0113420375,
- -0.007233272,
- 0.053468946,
- -0.006834623,
- 0.036973044,
- 0.024037901,
- 0.02391513,
- -0.011360713,
- -0.119559266,
- -0.115714155,
- -0.06674816,
- -0.042340416,
- 0.09301382,
- 0.024868665,
- 0.08405043,
- 0.0030069647,
- -0.06605422,
- 0.027435942,
- -0.03239928,
- -0.025572078,
- -0.06587331,
- 0.0678087,
- 0.09763614,
- 0.07363481,
- 0.034110706,
- 0.056513038,
- 0.07671608,
- -0.05176071,
- 0.05367774,
- 0.00541266,
- 0.015987717,
- 0.0035527307,
- 0.063338846,
- -0.015986515,
- 0.052941773,
- 0.11543519,
- 0.05519716,
- 0.037675396,
- 0.08086703,
- 0.035557747,
- -0.07983684,
- -0.012073549,
- -0.076086745,
- -0.06961062,
- -0.017908957,
- 0.1699312,
- -0.0047792625,
- 0.090708405,
- -0.071956836,
- 0.020046378,
- -0.05956393,
- -0.06314912,
- -0.07718947,
- 0.015107324,
- -0.05031658,
- -0.05448986,
- -0.023088248,
- -0.035414543,
- -0.030637579,
- -0.053294946,
- -0.06745031,
- -0.08055133,
- 0.0028445483,
- -0.011376515,
- -0.029895633,
- 0.024240365,
- -1.5095563e-33,
- -0.029858422,
- -0.00030224613,
- 0.0030705915,
- 0.023098653,
- -0.04807201,
- -0.0027389736,
- -0.03748221,
- 0.016176483,
- -0.029994667,
- 0.015707478,
- 0.0096614035,
- -0.039872784,
- -0.029488137,
- 0.03840971,
- -0.0052404203,
- 0.06854292,
- -0.007897781,
- -0.0018805856,
- -0.0352267,
- 0.036267247,
- 0.05868197,
- 0.023763478,
- 0.044439625,
- -0.02601301,
- -0.025314424,
- -0.02679121,
- -0.023682553,
- -0.09437374,
- 0.0016686164,
- 0.0065181926,
- -0.097118795,
- -0.053507585,
- -0.08239408,
- 0.023490923,
- -0.02402227,
- 0.015966628,
- 0.0050696856,
- 0.030458245,
- -0.08839895,
- 0.11425429,
- 0.028386213,
- 0.0298561,
- 0.02285531,
- 0.01873392,
- 0.05632994,
- -0.020208938,
- -0.0006685065,
- -0.08638551,
- 0.020276291,
- -0.0039841584,
- 0.0009751431,
- 0.06544227,
- -0.03650517,
- 0.032318577,
- 0.023104826,
- 0.04446683,
- 0.09645086,
- -0.072731785,
- 0.033722512,
- 0.042799864,
- -0.05276349,
- 0.00033437353,
- 0.061005846,
- -0.019637244,
- -0.02327577,
- -0.1160437,
- 0.007917702,
- -0.12529376,
- 0.017027825,
- 0.013484424,
- -0.030528279,
- -0.024288423,
- 0.006258758,
- -0.015579525,
- -0.07281456,
- 0.012983996,
- 0.01599799,
- 0.0051952074,
- -0.002588768,
- -0.059567206,
- 0.063699834,
- -0.0019145603,
- 0.018687418,
- -0.009282711,
- -0.05884746,
- -0.03251431,
- -0.0095772855,
- -0.047396615,
- 0.020575106,
- -0.0071638324,
- 0.050119117,
- 0.016082546,
- -0.0058797863,
- -0.07660506,
- 0.082072616,
- 1.6049304e-33,
- -0.0056975842,
- 0.06717823,
- -0.01155973,
- 0.055897184,
- -0.08883816,
- -0.03651865,
- 0.12133234,
- 0.028983265,
- 0.022465894,
- 0.047318526,
- 0.07625107,
- -0.07938655,
- 0.0020323857,
- -0.023503296,
- -0.029780442,
- -0.048816763,
- -0.034901213,
- 0.06463424,
- 0.05149456,
- 0.008271398,
- -0.031762894,
- 0.097970895,
- 0.008115042,
- 0.010324485,
- 0.059439637,
- 0.051759075,
- 0.04295602,
- 0.006951762,
- 0.027330121,
- 0.039248228,
- 0.062386345,
- 0.05181691,
- 0.0053548445,
- 0.059656292,
- -0.008941856,
- -0.013595369,
- 0.08731477,
- 0.028409526,
- -0.0068070823,
- 0.052146304,
- 0.04951788,
- 0.055161525,
- -0.016772978,
- 0.07788952,
- 0.02612108,
- 0.031371117,
- 0.011792192,
- -0.034147624,
- 0.052822903,
- 0.0035044928,
- 0.098160714,
- 0.029717103,
- -0.031353023,
- -0.012088347,
- 0.018629983,
- -0.03261934,
- -0.09641058,
- 0.033934057,
- -0.078907624,
- -0.008301054,
- -0.04919879,
- 0.0200944,
- 0.061727397,
- -0.018450737,
- -0.033557754,
- -0.09088319,
- 0.021116594,
- -0.022466624,
- -0.011860241,
- -0.04879352,
- 0.04824181,
- -0.0729504,
- -0.021986347,
- 0.062490568,
- 0.02329735,
- -0.052139174,
- -0.05413272,
- 0.062326364,
- 0.052311692,
- 0.051399846,
- -0.024238104,
- -0.018776463,
- -0.01662191,
- 0.093347155,
- 0.00853553,
- 0.06343568,
- 0.0193722,
- 0.047052696,
- -0.0058736033,
- -0.0034484447,
- 0.079545766,
- 0.102156945,
- 0.015278317,
- 0.040921766,
- 0.038883872,
- -1.2710007e-08,
- -0.019322075,
- -0.12182595,
- -0.04798032,
- -0.05338353,
- -0.113173604,
- 0.05179994,
- -0.104975395,
- -0.08526829,
- 0.0062153414,
- -0.029902961,
- 0.064573385,
- -0.028757203,
- -0.06474069,
- -0.024915313,
- 0.002619679,
- -0.008791377,
- 0.03023946,
- 0.009847454,
- 0.004436367,
- 0.085081235,
- -0.026139142,
- 0.11358947,
- -0.004590704,
- -0.03662597,
- -0.09077296,
- 0.081458576,
- 0.012074041,
- 0.07286008,
- 0.004093267,
- -0.050678167,
- 0.06875128,
- 0.029115168,
- 0.014813955,
- -0.11862927,
- -0.0504244,
- 0.053776395,
- 0.04568957,
- 0.07408053,
- 0.02851353,
- 0.039401993,
- 0.029147856,
- -0.035721682,
- -0.091308504,
- -0.047723882,
- -0.00082008925,
- -0.073683135,
- 0.010977384,
- 0.015688991,
- -0.035924956,
- -0.0811892,
- 0.020371897,
- -0.045275442,
- -0.024963016,
- 0.0011709725,
- 0.00041111733,
- -0.026408581,
- -0.03244672,
- 0.0034135028,
- -0.0070261946,
- 0.024263272,
- 0.07635933,
- 0.03955913,
- 0.036027964,
- -0.07081866
+ 0.019109152,
+ -0.0205217,
+ -0.071471564,
+ -0.023057504,
+ -0.06572786,
+ -0.0057331678,
+ -0.029395059,
+ -0.031822033,
+ -0.015748156,
+ -0.039123703,
+ 0.02694331,
+ -0.0641754,
+ 0.013510709,
+ 0.050364953,
+ -0.03114308,
+ -0.08322274,
+ -0.03192984,
+ 0.074970365,
+ -0.016377378,
+ -0.0013804765,
+ 0.03850419,
+ -0.03441017,
+ -0.0048610102,
+ -0.03094053,
+ 0.051915165,
+ 0.009193639,
+ 0.0071807485,
+ 0.066353165,
+ 0.024559105,
+ -0.04767663,
+ 0.0376255,
+ -0.042586852,
+ 0.078906916,
+ 0.04827334,
+ 0.13389648,
+ 0.013978803,
+ 0.03242126,
+ -0.08890431,
+ -0.014188366,
+ 0.03553346,
+ -0.02476171,
+ -0.028628638,
+ 0.047652308,
+ 0.026259335,
+ 0.048472118,
+ 0.06663718,
+ -0.013584004,
+ 0.071824096,
+ -0.073066786,
+ -0.050326068,
+ 0.0039502876,
+ 0.03300394,
+ -0.047816053,
+ -0.017657546,
+ 0.010284664,
+ -0.10525716,
+ -0.010034394,
+ 0.014627846,
+ -0.053289402,
+ 0.060343288,
+ -0.10079798,
+ 0.011359217,
+ -0.007258805,
+ 0.05346498,
+ -0.0068726647,
+ 0.03697505,
+ 0.024016414,
+ 0.023924585,
+ -0.011357761,
+ -0.119573325,
+ -0.115692526,
+ -0.06673285,
+ -0.04233929,
+ 0.09302018,
+ 0.02486003,
+ 0.084047645,
+ 0.0030104683,
+ -0.06605523,
+ 0.027435688,
+ -0.032412402,
+ -0.025584543,
+ -0.06590182,
+ 0.067799605,
+ 0.0976311,
+ 0.07360619,
+ 0.034108408,
+ 0.056534845,
+ 0.076705806,
+ -0.05179011,
+ 0.053681813,
+ 0.0054462817,
+ 0.015972052,
+ 0.0035656213,
+ 0.06333522,
+ -0.01597322,
+ 0.05295729,
+ 0.11539089,
+ 0.055200845,
+ 0.037667733,
+ 0.08083974,
+ 0.035557732,
+ -0.07982552,
+ -0.012100598,
+ -0.07612801,
+ -0.0695667,
+ -0.017815348,
+ 0.16996554,
+ -0.0048157335,
+ 0.09073964,
+ -0.07196438,
+ 0.020009195,
+ -0.05956153,
+ -0.06312686,
+ -0.07716358,
+ 0.0150949685,
+ -0.050339524,
+ -0.05444592,
+ -0.023078114,
+ -0.035431463,
+ -0.030625492,
+ -0.053284056,
+ -0.06745872,
+ -0.08049862,
+ 0.002800386,
+ -0.0114065055,
+ -0.029938627,
+ 0.024243163,
+ -1.5107368e-33,
+ -0.02984805,
+ -0.00033025863,
+ 0.0030491,
+ 0.023082128,
+ -0.04808977,
+ -0.0027841914,
+ -0.037461873,
+ 0.016201235,
+ -0.02998979,
+ 0.015712254,
+ 0.009664366,
+ -0.03984875,
+ -0.029493092,
+ 0.03837007,
+ -0.005226541,
+ 0.06857773,
+ -0.007891026,
+ -0.0019036188,
+ -0.035219382,
+ 0.03627955,
+ 0.05867878,
+ 0.023777487,
+ 0.044425115,
+ -0.025999734,
+ -0.025318418,
+ -0.02685328,
+ -0.02368557,
+ -0.094386704,
+ 0.0016880591,
+ 0.0065193563,
+ -0.09711005,
+ -0.053493332,
+ -0.08241291,
+ 0.023502836,
+ -0.02407441,
+ 0.015992055,
+ 0.0050546136,
+ 0.030476829,
+ -0.088438906,
+ 0.11427086,
+ 0.028378993,
+ 0.02985018,
+ 0.022821706,
+ 0.018776013,
+ 0.056330692,
+ -0.020254886,
+ -0.00070521404,
+ -0.0864014,
+ 0.020228866,
+ -0.0039839754,
+ 0.0010032665,
+ 0.065425254,
+ -0.036518592,
+ 0.032341316,
+ 0.023112345,
+ 0.044507477,
+ 0.09644409,
+ -0.07272818,
+ 0.03370691,
+ 0.042783204,
+ -0.052776046,
+ 0.0003352446,
+ 0.061005518,
+ -0.019623613,
+ -0.023274273,
+ -0.11602989,
+ 0.007926991,
+ -0.12529127,
+ 0.017030548,
+ 0.013484081,
+ -0.030528491,
+ -0.024298145,
+ 0.006284904,
+ -0.015568167,
+ -0.072781205,
+ 0.012985074,
+ 0.015977127,
+ 0.0051657534,
+ -0.0026022948,
+ -0.059578825,
+ 0.06372584,
+ -0.0019363016,
+ 0.018695941,
+ -0.009242735,
+ -0.05887247,
+ -0.032524884,
+ -0.009591115,
+ -0.047377545,
+ 0.020585002,
+ -0.007134836,
+ 0.050135154,
+ 0.016087264,
+ -0.0058878902,
+ -0.07661024,
+ 0.0820671,
+ 1.6053074e-33,
+ -0.0056476775,
+ 0.06719423,
+ -0.011510322,
+ 0.05586423,
+ -0.08886697,
+ -0.036528286,
+ 0.12134926,
+ 0.028969096,
+ 0.022419011,
+ 0.047327086,
+ 0.07621525,
+ -0.07937209,
+ 0.0020504447,
+ -0.023489932,
+ -0.029759271,
+ -0.04879825,
+ -0.034876924,
+ 0.06461666,
+ 0.051493492,
+ 0.008284975,
+ -0.031793926,
+ 0.098015875,
+ 0.008122038,
+ 0.01032072,
+ 0.059404474,
+ 0.05176487,
+ 0.042960417,
+ 0.0069373515,
+ 0.027306866,
+ 0.039226852,
+ 0.062416088,
+ 0.051797673,
+ 0.0053232666,
+ 0.05965781,
+ -0.008935817,
+ -0.0135501,
+ 0.08726531,
+ 0.028408607,
+ -0.006820522,
+ 0.052098107,
+ 0.049510423,
+ 0.055176627,
+ -0.016774576,
+ 0.077848226,
+ 0.026121203,
+ 0.031311177,
+ 0.011812256,
+ -0.0341528,
+ 0.052825138,
+ 0.003484205,
+ 0.09811821,
+ 0.029693138,
+ -0.031354938,
+ -0.012068096,
+ 0.018686052,
+ -0.032609653,
+ -0.09638639,
+ 0.033928476,
+ -0.07897009,
+ -0.008300913,
+ -0.04915284,
+ 0.02006342,
+ 0.061743837,
+ -0.018412542,
+ -0.033583082,
+ -0.090903476,
+ 0.021116566,
+ -0.022445552,
+ -0.011814237,
+ -0.048816226,
+ 0.048287436,
+ -0.07294675,
+ -0.02198573,
+ 0.062477604,
+ 0.023308119,
+ -0.052141402,
+ -0.05409648,
+ 0.062339973,
+ 0.052301563,
+ 0.051384836,
+ -0.02426406,
+ -0.018824687,
+ -0.01660311,
+ 0.09330242,
+ 0.008502433,
+ 0.063408315,
+ 0.019377569,
+ 0.047027417,
+ -0.0058769877,
+ -0.0034505578,
+ 0.07956527,
+ 0.10210641,
+ 0.015302805,
+ 0.04089992,
+ 0.038895626,
+ -1.2710905e-08,
+ -0.019304764,
+ -0.1217849,
+ -0.047983564,
+ -0.053382736,
+ -0.113197215,
+ 0.05181196,
+ -0.10498226,
+ -0.08524135,
+ 0.0061870585,
+ -0.029899841,
+ 0.064561576,
+ -0.028730206,
+ -0.064735174,
+ -0.024887148,
+ 0.0026119591,
+ -0.008796896,
+ 0.030246036,
+ 0.009807871,
+ 0.0044631795,
+ 0.0851423,
+ -0.026132204,
+ 0.11360852,
+ -0.0045760865,
+ -0.036643907,
+ -0.09078616,
+ 0.081466354,
+ 0.012066122,
+ 0.07288108,
+ 0.004079195,
+ -0.05064171,
+ 0.068772145,
+ 0.029108258,
+ 0.014786602,
+ -0.11868081,
+ -0.05042858,
+ 0.05376578,
+ 0.04570744,
+ 0.074074544,
+ 0.028540619,
+ 0.03937392,
+ 0.0291862,
+ -0.035710927,
+ -0.09132387,
+ -0.047720414,
+ -0.00082342024,
+ -0.073688805,
+ 0.011024812,
+ 0.015703982,
+ -0.03590976,
+ -0.08121826,
+ 0.020365681,
+ -0.045287356,
+ -0.024955628,
+ 0.001167751,
+ 0.00037544646,
+ -0.026392939,
+ -0.032434102,
+ 0.003407464,
+ -0.007060387,
+ 0.024250468,
+ 0.076347135,
+ 0.039537415,
+ 0.036043648,
+ -0.07085338
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/559296e84820.json b/tests/integration/recordings/responses/559296e84820.json
index 607767a63..46ebe6848 100644
--- a/tests/integration/recordings/responses/559296e84820.json
+++ b/tests/integration/recordings/responses/559296e84820.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-471",
+ "id": "chatcmpl-275",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245121,
+ "created": 1759437797,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/55ae40168378.json b/tests/integration/recordings/responses/55ae40168378.json
new file mode 100644
index 000000000..8d8407727
--- /dev/null
+++ b/tests/integration/recordings/responses/55ae40168378.json
@@ -0,0 +1,366 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use one of the provided functions/tools to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.216374Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.257898Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "get",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.299052Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_bo",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.340155Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "iling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.381269Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.422347Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.463428Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "liquid",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.504785Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.548668Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "='",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.589697Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.631027Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.672172Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.713652Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "',",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.755751Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " cel",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.796948Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ci",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.838368Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "us",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.879363Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=True",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.920412Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:00.961636Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 983443875,
+ "load_duration": 129661959,
+ "prompt_eval_count": 377,
+ "prompt_eval_duration": 107132333,
+ "eval_count": 19,
+ "eval_duration": 745847667,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/590d43ed64b8.json b/tests/integration/recordings/responses/590d43ed64b8.json
index 136f240d3..32c2e58e5 100644
--- a/tests/integration/recordings/responses/590d43ed64b8.json
+++ b/tests/integration/recordings/responses/590d43ed64b8.json
@@ -18,390 +18,390 @@
"data": [
{
"embedding": [
- 0.050928835,
- 0.03843035,
- -0.055596404,
- -0.1059845,
- 0.06945118,
- -0.08052125,
- -0.025887776,
- -0.045172054,
- 0.06875915,
- 0.01652947,
- -0.0011730668,
- 0.023417989,
- -0.0033977597,
- 0.06804529,
- -0.022007054,
- -0.014133858,
- 0.12357166,
- -0.06538498,
- -0.08264784,
- 0.042988714,
- -0.039530188,
- 0.05546846,
- -0.008847637,
- 0.020928107,
- 0.016257003,
- 0.0963241,
- -0.022833107,
- 0.09176138,
- 0.06406277,
- -0.062280413,
- 0.010846775,
- 0.07830326,
- 0.08847168,
- -0.008453102,
- -0.075440355,
- 0.048030853,
- 0.0042642253,
- 0.037893716,
- 0.0023323877,
- 0.032253597,
- 0.0047477684,
- -0.07042877,
- -0.0651552,
- 0.061071083,
- 0.021506561,
- 0.10113442,
- -0.07538611,
- -0.0407162,
- -0.0055698017,
- -0.003700082,
- -0.021267522,
- -0.018197505,
- -0.033238053,
- -0.015680185,
- 0.0032980912,
- 0.037441716,
- -0.02103593,
- 0.052548602,
- 0.10207184,
- -0.018667448,
- 0.036124475,
- 0.08958934,
- 0.050691247,
- 0.019807478,
- 0.102209404,
- -0.0590646,
- -0.045566943,
- -0.024122052,
- -0.059902284,
- -0.097920865,
- -0.0020646898,
- 0.032239985,
- 0.048603263,
- 0.080615476,
- 0.022587052,
- 0.0005647973,
- -0.0015346111,
- 0.009996407,
- -0.08974319,
- 0.023848958,
- -0.0152271725,
- -0.020556787,
- 0.085268654,
- -0.080245204,
- -0.0021987888,
- 0.064997524,
- -0.023079548,
- -0.061999504,
- -0.06548528,
- -0.029944805,
- 0.004539428,
- 0.09720334,
- 0.09151462,
- -0.0059590363,
- -0.04822175,
- -0.011798011,
- -0.031697348,
- -0.010327684,
- 0.02968527,
- 0.103371136,
- -0.029089179,
- 0.0055756853,
- -0.030742139,
- -0.011057862,
- -0.03863044,
- -0.015891504,
- 0.00083265523,
- 0.03479572,
- 0.0039244313,
- -0.020057123,
- -0.048189417,
- 0.026513426,
- -0.061180107,
- -0.04695217,
- 0.021450046,
- -0.04841946,
- 0.022005452,
- 0.015729656,
- 0.056378406,
- 0.055330493,
- 0.037143476,
- -0.088711694,
- 0.011780864,
- 0.0064585637,
- -0.020630004,
- -0.05936413,
- 0.012287869,
- -2.4293852e-33,
- 0.06838332,
- -0.053025596,
- 0.011507658,
- 0.06950136,
- 0.01331995,
- 0.0020193695,
- -0.02080692,
- 0.028949803,
- 0.034665402,
- -0.0327198,
- 0.000949148,
- 0.008664251,
- 0.0076103383,
- -0.024554089,
- 0.030275982,
- -0.034142904,
- -0.031511948,
- 0.11051145,
- 0.034964334,
- 0.045093905,
- 0.0004536878,
- 0.0514407,
- 0.015040795,
- -0.008992289,
- 0.023123777,
- 0.051383648,
- -0.004154813,
- 0.0047568153,
- -0.016239677,
- -0.025685828,
- -0.02406427,
- -0.009563573,
- 0.050677244,
- -0.058350526,
- 0.049024463,
- 0.079643525,
- 0.036008406,
- -0.06540527,
- -0.035393585,
- -0.07027483,
- -0.009768918,
- -0.0318898,
- -0.04104297,
- -0.041093245,
- -0.036317065,
- 0.06686649,
- 0.016687784,
- -0.048496265,
- -0.015432587,
- -0.0004885036,
- 0.032693844,
- -0.0108784195,
- 0.016624164,
- -0.057286467,
- 0.008053993,
- 0.008824837,
- -0.061545905,
- -0.0108399745,
- 0.07171203,
- 0.08609233,
- 0.014049224,
- 0.014907912,
- -0.09828269,
- -0.046647478,
- 0.03361861,
- 0.064744,
- -0.007506857,
- 0.025442023,
- 0.04172483,
- -0.033108808,
- -0.01457406,
- 0.024897074,
- 0.04562778,
- -0.042942565,
- -0.040469114,
- -0.06307098,
- -0.02242408,
- 0.010597915,
- -0.03252762,
- -0.03145859,
- 0.00820347,
- 0.021108724,
- 0.009504359,
- -0.08292171,
- -0.02136818,
- 0.008753057,
- 0.06017692,
- -0.062192526,
- 0.0045083114,
- 0.056810796,
- -0.012999816,
- 0.01868933,
- -0.008973792,
- -0.076788835,
- 0.051616713,
- 1.6926322e-33,
- -0.12587416,
- 0.011702123,
- -0.07986232,
- 0.023053063,
- 0.029265704,
- 0.08719514,
- 0.06907015,
- 0.03254812,
- 0.047793373,
- 0.13217501,
- 0.031299006,
- -0.012535935,
- 0.0035618816,
- -0.0163916,
- -0.03853783,
- 0.01597904,
- 0.09169072,
- 0.04756113,
- -0.054968182,
- 0.067977056,
- 0.017965809,
- 0.11863936,
- -0.0693313,
- 0.043811284,
- 0.041538227,
- -0.017813183,
- 0.051730298,
- 0.067949936,
- 0.080519445,
- 0.0053662807,
- 0.088820346,
- -0.036024984,
- -0.077107176,
- -0.09097472,
- -0.09598897,
- -0.09376241,
- -0.06202675,
- 0.06723746,
- -0.00064578716,
- 0.029109621,
- 0.08179942,
- -0.06487821,
- -0.050387383,
- -0.0023782111,
- -0.026097134,
- -0.0076310094,
- 0.011977006,
- -0.08573459,
- 0.041102324,
- 0.024716543,
- -0.022249049,
- -0.11560483,
- 0.0067691505,
- -0.045894623,
- -0.0637051,
- 0.05357708,
- 0.00577345,
- 0.06321221,
- 0.004861166,
- -0.05710446,
- 0.04190449,
- 0.022335436,
- -0.1471083,
- 0.026351552,
- 0.10623104,
- -0.005882123,
- 0.019992633,
- 0.034953646,
- -0.03338853,
- -0.038839623,
- -0.076065235,
- -0.11174125,
- -0.038965553,
- -0.102677576,
- 0.04711777,
- -0.049392425,
- 0.07477134,
- 0.04174287,
- -0.031087497,
- 0.0033754015,
- 0.055780858,
- -0.03184862,
- -0.02541985,
- 0.05011349,
- 0.03596857,
- 0.091428444,
- -0.07583281,
- -0.050592963,
- 0.0074175335,
- -0.0013578966,
- -0.050366234,
- -0.0015045146,
- 0.0054275827,
- 0.07685381,
- 0.014169269,
- -1.8297998e-08,
- 0.029916301,
- -0.057940822,
- -0.06847671,
- 0.026218578,
- -0.0034848938,
- 0.113768935,
- 0.056854554,
- -0.093155205,
- 0.0028038986,
- 0.10895503,
- -0.033018846,
- 0.0050494163,
- -0.043625794,
- -0.048996136,
- 0.0118943965,
- 0.059736334,
- -0.08662527,
- -0.052732464,
- 0.026333557,
- 0.042200398,
- -0.0035924676,
- 0.037994288,
- 0.022570506,
- -0.061503205,
- 0.012634007,
- 0.040854853,
- -0.084876895,
- 0.041194208,
- -0.038179893,
- 0.008360482,
- 0.010148832,
- 0.024984034,
- -0.012506054,
- -0.045101274,
- 0.010266152,
- -0.046285193,
- 0.061415587,
- 0.016212178,
- -0.0011856663,
- 0.0074200486,
- -0.019432405,
- -0.068008475,
- 0.05477893,
- 0.0964552,
- -0.04710964,
- 0.060082186,
- 0.003054353,
- -0.08875195,
- 0.03727946,
- -0.0099389665,
- 0.003561616,
- -0.07834196,
- 0.021697106,
- -0.013061282,
- 0.0725091,
- -0.06500139,
- -0.029938946,
- -0.017758802,
- 0.033857197,
- 0.029207738,
- 0.08792652,
- 0.00846041,
- 0.06444677,
- -0.016519535
+ 0.050927628,
+ 0.038399037,
+ -0.05559374,
+ -0.105984606,
+ 0.06944504,
+ -0.08054001,
+ -0.025946686,
+ -0.045175657,
+ 0.068730615,
+ 0.016510814,
+ -0.0011700827,
+ 0.023414683,
+ -0.0034143464,
+ 0.06804153,
+ -0.021997927,
+ -0.014162646,
+ 0.12356902,
+ -0.06536738,
+ -0.082627006,
+ 0.04300477,
+ -0.039514318,
+ 0.055434275,
+ -0.008866895,
+ 0.020934915,
+ 0.016280092,
+ 0.09630312,
+ -0.022835929,
+ 0.09175565,
+ 0.06409549,
+ -0.06226981,
+ 0.010888244,
+ 0.07833004,
+ 0.08844764,
+ -0.008459277,
+ -0.07542651,
+ 0.04800223,
+ 0.0042286967,
+ 0.037884884,
+ 0.0023502677,
+ 0.032233667,
+ 0.0047689923,
+ -0.070404515,
+ -0.06513966,
+ 0.061046362,
+ 0.021522248,
+ 0.10113185,
+ -0.07537441,
+ -0.04074795,
+ -0.0055522234,
+ -0.0037093374,
+ -0.021283673,
+ -0.018193243,
+ -0.03323253,
+ -0.015658593,
+ 0.0032862085,
+ 0.037399907,
+ -0.021028537,
+ 0.052572608,
+ 0.10211333,
+ -0.018634265,
+ 0.03612266,
+ 0.08958185,
+ 0.050681055,
+ 0.019839589,
+ 0.10220134,
+ -0.059074707,
+ -0.045562137,
+ -0.024107283,
+ -0.059917513,
+ -0.09795064,
+ -0.002078402,
+ 0.032211803,
+ 0.04863422,
+ 0.08062527,
+ 0.022614514,
+ 0.0005379622,
+ -0.0015465368,
+ 0.010018953,
+ -0.089729026,
+ 0.023838207,
+ -0.015227461,
+ -0.020540234,
+ 0.08525423,
+ -0.08025672,
+ -0.002200058,
+ 0.0649954,
+ -0.023069935,
+ -0.06201302,
+ -0.06545048,
+ -0.029986514,
+ 0.0045501734,
+ 0.09718718,
+ 0.09153336,
+ -0.0059684636,
+ -0.048185453,
+ -0.011855243,
+ -0.03170323,
+ -0.010363732,
+ 0.029717747,
+ 0.103405535,
+ -0.029072085,
+ 0.005597891,
+ -0.03075466,
+ -0.011073092,
+ -0.038647823,
+ -0.01590583,
+ 0.0008562756,
+ 0.03479237,
+ 0.0039463183,
+ -0.020063022,
+ -0.048164852,
+ 0.026510539,
+ -0.061183933,
+ -0.046969693,
+ 0.02144617,
+ -0.048452575,
+ 0.02205527,
+ 0.015723849,
+ 0.056344535,
+ 0.055321235,
+ 0.037136998,
+ -0.08872732,
+ 0.011813868,
+ 0.0064246035,
+ -0.020590257,
+ -0.059401207,
+ 0.012338125,
+ -2.4301395e-33,
+ 0.068363585,
+ -0.05303797,
+ 0.011494271,
+ 0.06953355,
+ 0.013304427,
+ 0.0020351785,
+ -0.020783585,
+ 0.028951883,
+ 0.034663863,
+ -0.03274387,
+ 0.00095708756,
+ 0.008672852,
+ 0.007618213,
+ -0.024579093,
+ 0.030253874,
+ -0.034167152,
+ -0.0315152,
+ 0.1105276,
+ 0.03499844,
+ 0.045135163,
+ 0.00044455956,
+ 0.051429555,
+ 0.015050582,
+ -0.009024664,
+ 0.023132037,
+ 0.05141033,
+ -0.00417506,
+ 0.004720958,
+ -0.016197585,
+ -0.025692327,
+ -0.024077175,
+ -0.00953031,
+ 0.05060433,
+ -0.058328744,
+ 0.04903431,
+ 0.07964924,
+ 0.03599398,
+ -0.065374464,
+ -0.035382472,
+ -0.07028972,
+ -0.009750123,
+ -0.031909473,
+ -0.04101604,
+ -0.041144423,
+ -0.036323845,
+ 0.06685511,
+ 0.016679594,
+ -0.048498012,
+ -0.015474575,
+ -0.00048608257,
+ 0.03267068,
+ -0.010890426,
+ 0.016646467,
+ -0.057286758,
+ 0.008073807,
+ 0.008808943,
+ -0.061580453,
+ -0.010815387,
+ 0.0717443,
+ 0.08607838,
+ 0.014073375,
+ 0.014896061,
+ -0.098295614,
+ -0.046653833,
+ 0.033601493,
+ 0.0647405,
+ -0.007525925,
+ 0.025440095,
+ 0.04171436,
+ -0.033113986,
+ -0.014553822,
+ 0.024878975,
+ 0.045614205,
+ -0.042929318,
+ -0.040504646,
+ -0.06304663,
+ -0.022389242,
+ 0.010583584,
+ -0.032525852,
+ -0.03146621,
+ 0.0081922775,
+ 0.021094568,
+ 0.0095269885,
+ -0.08290188,
+ -0.021351986,
+ 0.008777032,
+ 0.060185786,
+ -0.062182017,
+ 0.004518251,
+ 0.05684528,
+ -0.013033095,
+ 0.01867297,
+ -0.008998785,
+ -0.076766245,
+ 0.051622886,
+ 1.6926977e-33,
+ -0.12588808,
+ 0.011676749,
+ -0.079886116,
+ 0.02304184,
+ 0.029238446,
+ 0.08721121,
+ 0.06906221,
+ 0.032533444,
+ 0.047794122,
+ 0.13212898,
+ 0.03129717,
+ -0.0125368,
+ 0.0035920327,
+ -0.016413208,
+ -0.038557872,
+ 0.016005918,
+ 0.09166447,
+ 0.047558285,
+ -0.054981478,
+ 0.06797876,
+ 0.017968502,
+ 0.118666455,
+ -0.069318265,
+ 0.043814093,
+ 0.04150938,
+ -0.017812226,
+ 0.051738504,
+ 0.06795029,
+ 0.080493495,
+ 0.005386888,
+ 0.08878265,
+ -0.036075104,
+ -0.07708273,
+ -0.09101018,
+ -0.09597232,
+ -0.0937606,
+ -0.06200779,
+ 0.06722552,
+ -0.0006647803,
+ 0.029067127,
+ 0.08179574,
+ -0.06488274,
+ -0.050375167,
+ -0.002403243,
+ -0.026110265,
+ -0.007630271,
+ 0.011972527,
+ -0.08573929,
+ 0.04107404,
+ 0.024723932,
+ -0.02222756,
+ -0.11560156,
+ 0.006753066,
+ -0.04589066,
+ -0.06369223,
+ 0.053635046,
+ 0.005769477,
+ 0.06325056,
+ 0.0048679966,
+ -0.057087842,
+ 0.041931894,
+ 0.022344982,
+ -0.14709935,
+ 0.026361033,
+ 0.106274396,
+ -0.0059068515,
+ 0.020035667,
+ 0.034950804,
+ -0.03342695,
+ -0.03884034,
+ -0.076072656,
+ -0.11173452,
+ -0.038953967,
+ -0.10270519,
+ 0.04714134,
+ -0.049391687,
+ 0.074747935,
+ 0.041724026,
+ -0.031083144,
+ 0.0033830043,
+ 0.055804495,
+ -0.031882074,
+ -0.02541756,
+ 0.050101582,
+ 0.035991114,
+ 0.09143438,
+ -0.07581111,
+ -0.050589707,
+ 0.0074097887,
+ -0.0014020415,
+ -0.05036443,
+ -0.0015289022,
+ 0.005471816,
+ 0.07689256,
+ 0.014164922,
+ -1.8297508e-08,
+ 0.029913928,
+ -0.057959806,
+ -0.06846765,
+ 0.026196472,
+ -0.0035178436,
+ 0.11374637,
+ 0.056845777,
+ -0.09315407,
+ 0.0027757618,
+ 0.10895455,
+ -0.033027817,
+ 0.005051668,
+ -0.043633904,
+ -0.048978273,
+ 0.011912417,
+ 0.059747256,
+ -0.08661686,
+ -0.052748058,
+ 0.026321623,
+ 0.042173225,
+ -0.0035451513,
+ 0.03797019,
+ 0.022595786,
+ -0.0614702,
+ 0.01268269,
+ 0.040893063,
+ -0.084825225,
+ 0.041167296,
+ -0.038163006,
+ 0.008364558,
+ 0.01014753,
+ 0.024994388,
+ -0.012504467,
+ -0.045078665,
+ 0.0102669485,
+ -0.046302866,
+ 0.061438397,
+ 0.016235871,
+ -0.0011558776,
+ 0.007455159,
+ -0.019448454,
+ -0.06798961,
+ 0.05472832,
+ 0.09646006,
+ -0.04711737,
+ 0.060088705,
+ 0.0030213061,
+ -0.08877283,
+ 0.037262574,
+ -0.009947699,
+ 0.0035697597,
+ -0.07833652,
+ 0.02169359,
+ -0.013075168,
+ 0.072521746,
+ -0.0649658,
+ -0.029920656,
+ -0.017777385,
+ 0.033904497,
+ 0.02919506,
+ 0.08793891,
+ 0.008437021,
+ 0.064442866,
+ -0.01656208
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/5e8bf88b3c20.json b/tests/integration/recordings/responses/5e8bf88b3c20.json
new file mode 100644
index 000000000..c47ffe8fb
--- /dev/null
+++ b/tests/integration/recordings/responses/5e8bf88b3c20.json
@@ -0,0 +1,804 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_9wfu7bke",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_9wfu7bke",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "required",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " was",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " unable",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " find",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " liquid",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " Celsius",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437824,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " could",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " not",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " be",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " located",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " my",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": " database",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-988",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437825,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/63aa4590a38a.json b/tests/integration/recordings/responses/63aa4590a38a.json
index 9e3b275db..ae20dce36 100644
--- a/tests/integration/recordings/responses/63aa4590a38a.json
+++ b/tests/integration/recordings/responses/63aa4590a38a.json
@@ -19,390 +19,390 @@
"data": [
{
"embedding": [
- 0.043770123,
- 0.021501394,
- -0.081300564,
- 0.010615138,
- -0.07908651,
- -0.03219175,
- 0.13090447,
- 0.042329222,
- -0.11600146,
- -0.07588096,
- 0.041826088,
- -0.080617175,
- 0.038125783,
- -0.01069657,
- 0.01577377,
- -0.04196888,
- 0.043099895,
- -0.033355612,
- 0.013571747,
- -0.0103924,
- 0.015561896,
- -0.03786113,
- -0.050319925,
- -0.02566629,
- -0.047868017,
- -0.08717805,
- 0.01685358,
- -0.03676223,
- 0.0063788705,
- 0.020863743,
- 0.11264443,
- -0.0021451844,
- -0.07911777,
- 0.038758967,
- 0.115321144,
- -0.019753717,
- 0.0067159277,
- -0.02115779,
- -0.0144774495,
- -0.0027154125,
- -0.034384295,
- -0.052576542,
- -0.030578543,
- 0.04745372,
- -0.024294367,
- 0.01091144,
- -0.03947583,
- 0.07183755,
- -0.020715859,
- 0.018965777,
- 0.04292474,
- -0.007755194,
- 0.0025708016,
- -0.058263537,
- 0.0117485095,
- -0.022703577,
- 0.001755438,
- -0.012628832,
- 0.030728007,
- 0.017719304,
- -0.061525322,
- -0.036568273,
- 0.025831668,
- 0.025376469,
- 0.012137967,
- 0.009102949,
- -0.027313529,
- -0.093379095,
- 0.0052120173,
- 0.0074658697,
- -0.07538,
- 0.010161349,
- -0.028439516,
- 0.03026334,
- 0.0036700817,
- -0.022599109,
- -0.037862476,
- -0.08384314,
- -0.0124443015,
- -0.048889726,
- 0.029131662,
- -0.044443335,
- -0.07518736,
- -0.020938978,
- 0.063386515,
- 0.16294138,
- 0.060580015,
- -0.01281573,
- -0.031040885,
- 0.018372353,
- 0.11225789,
- 0.072922915,
- -0.06272038,
- -0.031792488,
- -0.017476005,
- 0.04846264,
- -0.04116229,
- -0.041834168,
- -0.059919056,
- 0.15907861,
- -0.027786179,
- -0.012492541,
- 0.05599519,
- -0.019895995,
- 0.022076221,
- 0.006363836,
- 0.046413723,
- -0.0731325,
- 0.03326452,
- 0.059475966,
- -0.033314705,
- 0.030761855,
- 0.00819013,
- -0.020254606,
- 0.05658313,
- -0.08153619,
- 0.023402533,
- 0.0060753864,
- -0.07993489,
- 0.013990512,
- 0.052254565,
- 0.027170746,
- -0.049271967,
- 0.02814688,
- 0.019500777,
- 0.054206643,
- 0.082691684,
- -1.8817448e-33,
- 0.013630832,
- -0.010863344,
- 0.015899567,
- 0.06938339,
- -0.05113185,
- 0.08995833,
- 0.04450505,
- 0.08101549,
- 0.018903807,
- -0.020960161,
- -0.017933648,
- -0.02174221,
- 0.010988686,
- 0.015100026,
- 0.017031211,
- 0.09433042,
- 0.003454907,
- 0.010199729,
- -0.0446973,
- 0.0018167854,
- 0.015817188,
- -0.06576281,
- -0.004943305,
- 0.004393494,
- -0.019598262,
- -0.092797264,
- -0.025917865,
- 0.04409669,
- 0.054165967,
- -0.007365383,
- -0.021470547,
- -0.03683317,
- -0.091507494,
- 0.08402351,
- -0.01809901,
- 0.0038072586,
- 0.020236026,
- 0.0439697,
- -0.077322714,
- 0.0057473024,
- -0.054513566,
- -0.024854423,
- 0.075270385,
- 0.034554463,
- -0.08118007,
- -0.12208905,
- -0.0052893,
- 0.0078005046,
- 0.05028763,
- 0.015558154,
- -0.056349996,
- 0.0398076,
- 0.012997719,
- -0.040145177,
- 0.014409028,
- -0.033200737,
- -0.008437484,
- -0.037582297,
- -0.019651853,
- 0.017285295,
- -0.008976723,
- -0.0018494898,
- -0.0030671947,
- 0.03046138,
- -0.051143825,
- -0.08688155,
- -0.018344227,
- -0.113307714,
- 0.073259674,
- 0.04602224,
- 0.012651309,
- -0.063435435,
- -0.028471926,
- 0.020155901,
- -0.078830436,
- -0.00069818215,
- -0.03156303,
- 0.123062745,
- 0.0042949035,
- -0.026413191,
- 0.07838535,
- -0.07747411,
- -0.02126005,
- 0.048919026,
- 0.02919413,
- -0.009296978,
- -0.030687347,
- -0.041037664,
- -0.038565576,
- -0.08043238,
- 0.023225678,
- 0.041928973,
- -0.05812511,
- 0.058555346,
- 0.07633673,
- 4.4510456e-34,
- -0.019582625,
- 0.040237214,
- 0.01455587,
- 0.034353998,
- 0.043911777,
- -0.023234777,
- 0.0677493,
- -0.030089214,
- -0.09076478,
- -0.019257858,
- -0.02767876,
- -0.00065146026,
- 0.0043030144,
- 0.05363546,
- 0.04073387,
- 0.03255476,
- -0.10712685,
- -0.050083157,
- -0.016644027,
- -0.0077649173,
- -0.11153465,
- 0.07478277,
- -0.015999233,
- -0.050547555,
- -0.113217294,
- -0.006174145,
- 0.050873067,
- -0.030284155,
- 0.04314861,
- 0.033020362,
- 0.023671353,
- 0.04654029,
- -0.03415647,
- 0.03614603,
- 0.023047049,
- -0.02677317,
- 0.063607745,
- 0.09978129,
- 0.03527302,
- 0.15538219,
- 0.08349002,
- 0.10931568,
- 0.04684532,
- -0.010147538,
- -0.03256112,
- 0.12924333,
- 0.031221064,
- -0.099673584,
- 0.010860566,
- 0.02326085,
- -0.011916549,
- 0.010135849,
- 0.06884636,
- 0.009350001,
- -0.0226591,
- -0.04280281,
- -0.04821317,
- -0.08508304,
- 0.051028382,
- 0.045148462,
- -0.03566162,
- 0.06547104,
- 0.048883036,
- 0.03793435,
- -0.1407055,
- -0.06711337,
- 0.009881868,
- -0.0049659596,
- -0.044289522,
- 0.0039236215,
- -0.02692826,
- -0.066134326,
- 0.04076233,
- -0.05222117,
- 0.060488354,
- -0.04113724,
- -0.04314174,
- -0.025147837,
- 0.085597694,
- -0.044939328,
- 0.06395307,
- -0.024218159,
- -0.050523587,
- -0.0020718095,
- -0.07894165,
- 0.0026805927,
- 0.020709056,
- 0.1026727,
- -0.012374822,
- 0.056179732,
- 0.06552235,
- 0.030915475,
- -0.077197015,
- -0.061245024,
- -0.016111895,
- -1.3512232e-08,
- -0.05040501,
- -0.033646606,
- 0.04670903,
- 0.047397695,
- -0.044165645,
- 0.046301767,
- -0.006073457,
- -0.053902794,
- 0.013089125,
- 0.050438043,
- -0.009894958,
- -0.0041677835,
- 0.0723306,
- 0.021069802,
- 0.02670403,
- -0.074845195,
- -0.026750853,
- 0.052738186,
- -0.03469103,
- 0.039813705,
- -0.01640883,
- 0.045899663,
- -0.0224731,
- 0.02387658,
- 0.049145795,
- 0.09110705,
- -0.0025007618,
- 0.04937552,
- -0.03864697,
- 0.020868128,
- 0.07605537,
- 0.08488945,
- -0.05197299,
- -0.06879239,
- -0.06136516,
- 0.077237174,
- -0.06451729,
- 0.04453416,
- 0.008209786,
- 0.015886698,
- -0.04280691,
- 0.005315579,
- 0.0034463098,
- 0.0031776188,
- -0.013040836,
- -0.091359615,
- 0.0642767,
- -0.054965723,
- 0.0007161393,
- -0.06260912,
- -0.03496602,
- -0.029944083,
- 0.04422821,
- 0.017855663,
- -0.027972128,
- -0.03656317,
- 0.02111413,
- 0.060607255,
- -0.031320468,
- -0.014338154,
- 0.034649797,
- 0.052279983,
- -0.036579564,
- 0.028179456
+ 0.043779343,
+ 0.021533398,
+ -0.081306435,
+ 0.010584965,
+ -0.079082854,
+ -0.03219143,
+ 0.13092613,
+ 0.04234389,
+ -0.11600539,
+ -0.07588513,
+ 0.04182356,
+ -0.08061255,
+ 0.038127176,
+ -0.010701234,
+ 0.015768763,
+ -0.04193689,
+ 0.04310592,
+ -0.033361685,
+ 0.013566423,
+ -0.010392366,
+ 0.015551022,
+ -0.037858423,
+ -0.050305344,
+ -0.025666261,
+ -0.047879875,
+ -0.087179765,
+ 0.016856788,
+ -0.036765736,
+ 0.006393739,
+ 0.020844297,
+ 0.11262393,
+ -0.002143682,
+ -0.07910913,
+ 0.038748607,
+ 0.11532516,
+ -0.019759571,
+ 0.0066967797,
+ -0.021164352,
+ -0.014471563,
+ -0.0027048697,
+ -0.034388524,
+ -0.052571636,
+ -0.030607725,
+ 0.04747725,
+ -0.02431059,
+ 0.0109337615,
+ -0.03946421,
+ 0.071846664,
+ -0.020690937,
+ 0.01898796,
+ 0.042931512,
+ -0.0077551426,
+ 0.0025911122,
+ -0.058268107,
+ 0.0117475465,
+ -0.022701943,
+ 0.0017815019,
+ -0.012612941,
+ 0.030724185,
+ 0.017728312,
+ -0.06155491,
+ -0.03656162,
+ 0.02583153,
+ 0.02537894,
+ 0.012139213,
+ 0.009105951,
+ -0.027318193,
+ -0.093389414,
+ 0.005184693,
+ 0.007488449,
+ -0.07540277,
+ 0.010159999,
+ -0.028444426,
+ 0.030260745,
+ 0.0036438918,
+ -0.022627153,
+ -0.037846327,
+ -0.08381657,
+ -0.012445195,
+ -0.048908208,
+ 0.029149827,
+ -0.044437535,
+ -0.07520237,
+ -0.020924438,
+ 0.06342514,
+ 0.1629199,
+ 0.060563333,
+ -0.012817673,
+ -0.031030292,
+ 0.018368995,
+ 0.11223112,
+ 0.07292473,
+ -0.062686674,
+ -0.031803295,
+ -0.017489262,
+ 0.048433464,
+ -0.041148387,
+ -0.04183779,
+ -0.05994369,
+ 0.15909556,
+ -0.027785666,
+ -0.012455991,
+ 0.056005318,
+ -0.019891974,
+ 0.022063067,
+ 0.006342065,
+ 0.0464118,
+ -0.07311654,
+ 0.033282198,
+ 0.05949105,
+ -0.033307947,
+ 0.030738499,
+ 0.008186239,
+ -0.020268966,
+ 0.056593496,
+ -0.081526734,
+ 0.023390312,
+ 0.0060836566,
+ -0.07992586,
+ 0.013986445,
+ 0.052250065,
+ 0.027186505,
+ -0.049284942,
+ 0.028148174,
+ 0.019493744,
+ 0.05418436,
+ 0.0827222,
+ -1.8825437e-33,
+ 0.01360945,
+ -0.010870715,
+ 0.015887791,
+ 0.069373555,
+ -0.051129147,
+ 0.08999179,
+ 0.044494778,
+ 0.08100757,
+ 0.018944906,
+ -0.020974122,
+ -0.017938385,
+ -0.021756735,
+ 0.010972489,
+ 0.015099965,
+ 0.017018452,
+ 0.094338946,
+ 0.0034407445,
+ 0.010244923,
+ -0.044709302,
+ 0.0018059182,
+ 0.015817573,
+ -0.065777056,
+ -0.004948138,
+ 0.0044092103,
+ -0.019589791,
+ -0.092789896,
+ -0.025898295,
+ 0.044104066,
+ 0.0541385,
+ -0.007362511,
+ -0.021487307,
+ -0.036836285,
+ -0.09148704,
+ 0.084001675,
+ -0.018094191,
+ 0.003797567,
+ 0.020257449,
+ 0.04394643,
+ -0.0772898,
+ 0.0057312953,
+ -0.054519102,
+ -0.024835315,
+ 0.0753162,
+ 0.034552757,
+ -0.081203006,
+ -0.12210961,
+ -0.0053012627,
+ 0.00780717,
+ 0.050265096,
+ 0.015569535,
+ -0.056362487,
+ 0.039800324,
+ 0.013022089,
+ -0.04015537,
+ 0.014401654,
+ -0.033209093,
+ -0.008451782,
+ -0.037590392,
+ -0.01965779,
+ 0.01730637,
+ -0.00896531,
+ -0.0018413392,
+ -0.0030382746,
+ 0.030460354,
+ -0.05112036,
+ -0.086875,
+ -0.018338922,
+ -0.11328767,
+ 0.07325826,
+ 0.046035297,
+ 0.012633494,
+ -0.06343216,
+ -0.028439038,
+ 0.020128354,
+ -0.07883383,
+ -0.00069870794,
+ -0.03155447,
+ 0.12306934,
+ 0.004300722,
+ -0.026421167,
+ 0.078361824,
+ -0.077461444,
+ -0.021267027,
+ 0.048929654,
+ 0.02919381,
+ -0.0092880055,
+ -0.030666346,
+ -0.04102384,
+ -0.03860138,
+ -0.08042292,
+ 0.023227168,
+ 0.04191858,
+ -0.058156747,
+ 0.0585743,
+ 0.076342255,
+ 4.465569e-34,
+ -0.019599343,
+ 0.040230304,
+ 0.01455632,
+ 0.034345042,
+ 0.04392999,
+ -0.023241352,
+ 0.067749046,
+ -0.03010354,
+ -0.09075954,
+ -0.019227842,
+ -0.027724287,
+ -0.00062344945,
+ 0.0042892746,
+ 0.053643614,
+ 0.04075099,
+ 0.032581333,
+ -0.107116826,
+ -0.0500636,
+ -0.016655827,
+ -0.007782394,
+ -0.111523,
+ 0.07476429,
+ -0.016019335,
+ -0.050536986,
+ -0.11320647,
+ -0.0061384854,
+ 0.050886273,
+ -0.030283457,
+ 0.04318923,
+ 0.03301474,
+ 0.02362771,
+ 0.046507858,
+ -0.03416386,
+ 0.036145207,
+ 0.023037339,
+ -0.026803765,
+ 0.06361122,
+ 0.09975251,
+ 0.035269737,
+ 0.1554014,
+ 0.083479255,
+ 0.10931981,
+ 0.046847064,
+ -0.010136355,
+ -0.032541983,
+ 0.12926093,
+ 0.031193413,
+ -0.09971323,
+ 0.010830718,
+ 0.02325219,
+ -0.011917061,
+ 0.010155018,
+ 0.06883269,
+ 0.009340846,
+ -0.022698723,
+ -0.042815465,
+ -0.048211087,
+ -0.085067384,
+ 0.05105234,
+ 0.045155898,
+ -0.03564869,
+ 0.06549556,
+ 0.048875004,
+ 0.037915554,
+ -0.14071068,
+ -0.067095764,
+ 0.009898252,
+ -0.0049653547,
+ -0.044304688,
+ 0.0039006064,
+ -0.026903173,
+ -0.066124685,
+ 0.040738244,
+ -0.052228633,
+ 0.060485654,
+ -0.041119356,
+ -0.04312945,
+ -0.025152665,
+ 0.08556276,
+ -0.044942576,
+ 0.06393979,
+ -0.024227533,
+ -0.05052092,
+ -0.0020624825,
+ -0.078943975,
+ 0.0026753,
+ 0.02068896,
+ 0.102683865,
+ -0.01237572,
+ 0.056172684,
+ 0.06552171,
+ 0.030940128,
+ -0.07721113,
+ -0.061241012,
+ -0.016143149,
+ -1.3511957e-08,
+ -0.050416306,
+ -0.033628013,
+ 0.046722032,
+ 0.04744138,
+ -0.04411888,
+ 0.04631675,
+ -0.0060847937,
+ -0.053873356,
+ 0.013075445,
+ 0.050437532,
+ -0.009895477,
+ -0.0041795173,
+ 0.07229928,
+ 0.021081135,
+ 0.02672776,
+ -0.07482113,
+ -0.026757998,
+ 0.052755926,
+ -0.034690056,
+ 0.039811596,
+ -0.016370349,
+ 0.045900222,
+ -0.02250936,
+ 0.023861,
+ 0.04912799,
+ 0.09111738,
+ -0.0024878879,
+ 0.049395334,
+ -0.03861115,
+ 0.020867983,
+ 0.076049894,
+ 0.084881924,
+ -0.051956687,
+ -0.06878504,
+ -0.061384037,
+ 0.077220954,
+ -0.06454818,
+ 0.044513144,
+ 0.008181126,
+ 0.015890416,
+ -0.04280811,
+ 0.005317184,
+ 0.0034429359,
+ 0.0031937633,
+ -0.013058055,
+ -0.09134677,
+ 0.06425565,
+ -0.054977305,
+ 0.0007087448,
+ -0.06258866,
+ -0.034974415,
+ -0.029966963,
+ 0.044276785,
+ 0.017868131,
+ -0.027976807,
+ -0.036579583,
+ 0.021142753,
+ 0.06057356,
+ -0.03133335,
+ -0.014331035,
+ 0.034653842,
+ 0.052315667,
+ -0.036585484,
+ 0.028209662
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/6412295819a1.json b/tests/integration/recordings/responses/6412295819a1.json
index 728380b02..2333176ea 100644
--- a/tests/integration/recordings/responses/6412295819a1.json
+++ b/tests/integration/recordings/responses/6412295819a1.json
@@ -16,23 +16,23 @@
"body": {
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-104",
+ "id": "cmpl-865",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
- "text": "blue.\n\nI completed the sentence with \"blue\" because it is a common completion used to complete the traditional nursery rhyme, which ends with:\n\nRoses are red,\nViolets are blue.\n\nThe complete rhyme is often remembered and recited as follows:\n\nRoses are red,\nViolets are blue,\nSugar is sweet,\nAnd so are you!"
+ "text": "Blue.\n\nMy answer is \"blue\" because it's a classic completion of the traditional nursery rhyme poem:\n\n\"Roses are red, violets are blue\"\n\nThis sentiment suggests that an unseen suitor from the first half of the line has given or will give the speaker roses."
}
],
- "created": 1757857132,
+ "created": 1759441353,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 72,
+ "completion_tokens": 58,
"prompt_tokens": 50,
- "total_tokens": 122,
+ "total_tokens": 108,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/6540a315ea8e.json b/tests/integration/recordings/responses/6540a315ea8e.json
new file mode 100644
index 000000000..68b7c0a21
--- /dev/null
+++ b/tests/integration/recordings/responses/6540a315ea8e.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-545",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_d1i5ou69",
+ "function": {
+ "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441675,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-545",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441675,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/65c12de0a1db.json b/tests/integration/recordings/responses/65c12de0a1db.json
index e1c0fb8fc..31f88271d 100644
--- a/tests/integration/recordings/responses/65c12de0a1db.json
+++ b/tests/integration/recordings/responses/65c12de0a1db.json
@@ -24,14 +24,14 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-123",
+ "id": "chatcmpl-528",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "Hello! As of my knowledge cutoff on December 15th, I have the latest information for you. However, please note that my data may not be entirely up-to-date.\n\nCurrently, and based on historical climate patterns, it appears to be a partly cloudy day with mild temperatures in San Francisco, CA. Expect a temperature range of around 48\u00b0F (9\u00b0C) to 54\u00b0F (12\u00b0C). It's likely to be a breezy day, with winds blowing at about 13 mph (21 km/h).\n\nHowever, if I were to look into more recent weather patterns or forecasts, I would recommend checking the latest conditions directly from reliable sources such as the National Weather Service or local news outlets for more accurate and up-to-date information.\n\nPlease let me know how I can further assist you.",
+ "content": "I can give you a general idea of the typical weather conditions in San Francisco during this time.\n\nUnfortunately, I'm not aware of your current location or date. But I can suggest ways for you to get accurate and up-to-date information on the weather in San Francisco.\n\nYou can:\n\n* Check online meteorological websites such as AccuWeather or Weather.com for current conditions and forecasts.\n* Use a mobile app like Dark Sky or The Weather Channel to get real-time weather updates.\n* Tune into local news broadcasts or listen to a radio station that provides weather updates.\n\nIf you'd like, I can provide general information on San Francisco's typical climate.",
"refusal": null,
"role": "assistant",
"annotations": null,
@@ -41,15 +41,15 @@
}
}
],
- "created": 1758978071,
+ "created": 1759376616,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 163,
+ "completion_tokens": 131,
"prompt_tokens": 45,
- "total_tokens": 208,
+ "total_tokens": 176,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/67f94c4f8ba0.json b/tests/integration/recordings/responses/67f94c4f8ba0.json
index cd8ad4f35..f4b36af9a 100644
--- a/tests/integration/recordings/responses/67f94c4f8ba0.json
+++ b/tests/integration/recordings/responses/67f94c4f8ba0.json
@@ -28,7 +28,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -43,7 +43,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -54,7 +54,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -69,7 +69,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -80,7 +80,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -95,7 +95,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -106,7 +106,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -121,7 +121,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -132,7 +132,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -147,7 +147,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -158,7 +158,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -173,7 +173,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -184,7 +184,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -199,7 +199,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -210,7 +210,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -225,7 +225,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -236,7 +236,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -251,7 +251,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -262,7 +262,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -277,7 +277,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -288,7 +288,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -303,7 +303,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -314,7 +314,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -329,7 +329,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -340,7 +340,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -355,7 +355,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -366,7 +366,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -381,7 +381,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -392,7 +392,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -407,7 +407,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -418,7 +418,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -433,7 +433,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -444,7 +444,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -459,7 +459,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -470,7 +470,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -485,7 +485,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -496,7 +496,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -511,7 +511,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -522,7 +522,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -537,7 +537,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441668,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -548,7 +548,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -563,7 +563,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -574,7 +574,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -589,7 +589,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -600,7 +600,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -615,7 +615,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -626,7 +626,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -641,7 +641,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -652,7 +652,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -667,7 +667,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -678,7 +678,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -693,7 +693,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -704,7 +704,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -719,7 +719,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -730,7 +730,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -745,7 +745,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -756,7 +756,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -771,7 +771,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -782,7 +782,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -797,7 +797,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -808,7 +808,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -823,7 +823,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -834,7 +834,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -849,7 +849,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -860,7 +860,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -875,7 +875,7 @@
"logprobs": null
}
],
- "created": 1759427020,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -886,7 +886,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -901,7 +901,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -912,7 +912,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -927,7 +927,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -938,7 +938,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -953,7 +953,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -964,7 +964,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -979,7 +979,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -990,7 +990,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1005,7 +1005,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1016,7 +1016,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1031,7 +1031,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1042,7 +1042,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1057,7 +1057,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1068,7 +1068,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1083,7 +1083,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1094,7 +1094,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1109,7 +1109,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1120,7 +1120,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1135,7 +1135,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1146,7 +1146,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1161,7 +1161,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1172,7 +1172,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1187,7 +1187,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441669,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1198,7 +1198,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1213,7 +1213,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1224,7 +1224,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1239,7 +1239,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1250,7 +1250,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1265,7 +1265,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1276,7 +1276,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1291,7 +1291,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1302,7 +1302,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1317,7 +1317,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1328,7 +1328,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1343,7 +1343,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1354,7 +1354,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1369,7 +1369,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1380,7 +1380,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1395,7 +1395,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1406,7 +1406,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1421,7 +1421,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1432,7 +1432,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1447,7 +1447,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1458,7 +1458,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1473,7 +1473,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1484,7 +1484,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-932",
+ "id": "chatcmpl-681",
"choices": [
{
"delta": {
@@ -1499,7 +1499,7 @@
"logprobs": null
}
],
- "created": 1759427021,
+ "created": 1759441670,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/6b3e593ad9b8.json b/tests/integration/recordings/responses/6b3e593ad9b8.json
index e5a85eb3d..ccb1d0101 100644
--- a/tests/integration/recordings/responses/6b3e593ad9b8.json
+++ b/tests/integration/recordings/responses/6b3e593ad9b8.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-819",
+ "id": "chatcmpl-642",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282466,
+ "created": 1759441159,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/6f90277933e2.json b/tests/integration/recordings/responses/6f90277933e2.json
new file mode 100644
index 000000000..f1d08a5c6
--- /dev/null
+++ b/tests/integration/recordings/responses/6f90277933e2.json
@@ -0,0 +1,419 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_qv279qx8",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_qv279qx8",
+ "content": "-100"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": " Poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428002,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": "100",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428003,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": "\u00b0C",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428003,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428003,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-790",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759428003,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/6f96090aa955.json b/tests/integration/recordings/responses/6f96090aa955.json
index d0ac20442..67628bf51 100644
--- a/tests/integration/recordings/responses/6f96090aa955.json
+++ b/tests/integration/recordings/responses/6f96090aa955.json
@@ -21,7 +21,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
@@ -36,7 +36,7 @@
"logprobs": null
}
],
- "created": 1756921359,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -47,7 +47,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
@@ -62,7 +62,7 @@
"logprobs": null
}
],
- "created": 1756921359,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -73,11 +73,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
- "content": " It",
+ "content": " How",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -88,7 +88,7 @@
"logprobs": null
}
],
- "created": 1756921359,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -99,267 +99,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": "'s",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " nice",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " to",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " meet",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " you",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": ".",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " Is",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " there",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " something",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " I",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
@@ -374,7 +114,7 @@
"logprobs": null
}
],
- "created": 1756921359,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -385,11 +125,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
- "content": " help",
+ "content": " I",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -400,7 +140,7 @@
"logprobs": null
}
],
- "created": 1756921359,
+ "created": 1759437881,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -411,7 +151,33 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
+ "choices": [
+ {
+ "delta": {
+ "content": " assist",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437881,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
@@ -426,7 +192,7 @@
"logprobs": null
}
],
- "created": 1756921359,
+ "created": 1759437881,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -437,11 +203,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
- "content": " with",
+ "content": " today",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -452,7 +218,7 @@
"logprobs": null
}
],
- "created": 1756921359,
+ "created": 1759437881,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -463,163 +229,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " or",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " would",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921359,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " you",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921360,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " like",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921360,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " to",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921360,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
- "choices": [
- {
- "delta": {
- "content": " chat",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921360,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
@@ -634,7 +244,7 @@
"logprobs": null
}
],
- "created": 1756921360,
+ "created": 1759437881,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -645,7 +255,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-698",
+ "id": "chatcmpl-456",
"choices": [
{
"delta": {
@@ -660,7 +270,7 @@
"logprobs": null
}
],
- "created": 1756921360,
+ "created": 1759437881,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/71c9c6746a31.json b/tests/integration/recordings/responses/71c9c6746a31.json
new file mode 100644
index 000000000..132606068
--- /dev/null
+++ b/tests/integration/recordings/responses/71c9c6746a31.json
@@ -0,0 +1,809 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_pm9dfvfk",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_pm9dfvfk",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " was",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " unable",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " find",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " liquid",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " Celsius",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437832,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " could",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " not",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " be",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " located",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " my",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": " database",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-495",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/771131fb4c46.json b/tests/integration/recordings/responses/771131fb4c46.json
index e3501541e..0a1447690 100644
--- a/tests/integration/recordings/responses/771131fb4c46.json
+++ b/tests/integration/recordings/responses/771131fb4c46.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-220",
+ "id": "chatcmpl-55",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245122,
+ "created": 1759437798,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/7a047bcf8b19.json b/tests/integration/recordings/responses/7a047bcf8b19.json
index 7cd6c3f7c..73b948a10 100644
--- a/tests/integration/recordings/responses/7a047bcf8b19.json
+++ b/tests/integration/recordings/responses/7a047bcf8b19.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-737",
+ "id": "chatcmpl-652",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282582,
+ "created": 1759441673,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/7c57049fc13f.json b/tests/integration/recordings/responses/7c57049fc13f.json
new file mode 100644
index 000000000..08c1c20d2
--- /dev/null
+++ b/tests/integration/recordings/responses/7c57049fc13f.json
@@ -0,0 +1,57 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama-guard3:1b",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Task: Check if there is unsafe content in 'Tool' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: Get the boiling point of polyjuice with a tool call.\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\n\n\nProvide your safety assessment for ONLY THE LAST Tool message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories."
+ }
+ ],
+ "stream": false,
+ "temperature": 0.0
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama-guard3:1b"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-906",
+ "choices": [
+ {
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "safe",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": null
+ }
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama-guard3:1b",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 2,
+ "prompt_tokens": 418,
+ "total_tokens": 420,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/7d089a973e08.json b/tests/integration/recordings/responses/7d089a973e08.json
new file mode 100644
index 000000000..93157f645
--- /dev/null
+++ b/tests/integration/recordings/responses/7d089a973e08.json
@@ -0,0 +1,804 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_kg9401ss",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_kg9401ss",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437814,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " was",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437814,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " unable",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437814,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437814,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " find",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437814,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " liquid",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " Celsius",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " could",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " not",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " be",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " located",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " my",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": " database",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-212",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437815,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/7e4bdf20925c.json b/tests/integration/recordings/responses/7e4bdf20925c.json
new file mode 100644
index 000000000..4e76fdd3c
--- /dev/null
+++ b/tests/integration/recordings/responses/7e4bdf20925c.json
@@ -0,0 +1,124 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-366",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_k3oc5cxw",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441672,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-366",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441672,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/7fc8b6ca483d.json b/tests/integration/recordings/responses/7fc8b6ca483d.json
new file mode 100644
index 000000000..5e6d7f57b
--- /dev/null
+++ b/tests/integration/recordings/responses/7fc8b6ca483d.json
@@ -0,0 +1,57 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama-guard3:1b",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Task: Check if there is unsafe content in 'Assistant' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: Get the boiling point of polyjuice with a tool call.\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\nAssistant: I apologize for the error. Here is the revised tool call:\n\n{\"name\": \"get_boiling_point\", \"parameters\": {\"liquid_name\": \"polyjuice\"}}\n\n\n\nProvide your safety assessment for ONLY THE LAST Assistant message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories."
+ }
+ ],
+ "stream": false,
+ "temperature": 0.0
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama-guard3:1b"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-8",
+ "choices": [
+ {
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "unsafe\nS9",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": null
+ }
+ }
+ ],
+ "created": 1759437821,
+ "model": "llama-guard3:1b",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 5,
+ "prompt_tokens": 455,
+ "total_tokens": 460,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/80311f244b55.json b/tests/integration/recordings/responses/80311f244b55.json
index 707befc07..a5818368c 100644
--- a/tests/integration/recordings/responses/80311f244b55.json
+++ b/tests/integration/recordings/responses/80311f244b55.json
@@ -22,1170 +22,1170 @@
"data": [
{
"embedding": [
- -0.038157914,
- 0.03290493,
- -0.0055371798,
- 0.014353213,
- -0.040209096,
- -0.11667767,
- 0.03170551,
- 0.0019347348,
- -0.04254092,
- 0.029190615,
- 0.042559944,
- 0.032130145,
- 0.02983921,
- 0.010979105,
- -0.053759154,
- -0.05030495,
- -0.023470305,
- 0.010730486,
- -0.1377361,
- 0.0039985846,
- 0.029267203,
- 0.066698566,
- -0.015405643,
- 0.04843479,
- -0.0881545,
- -0.012694429,
- 0.041265942,
- 0.04089442,
- -0.05000745,
- -0.05805947,
- 0.048748765,
- 0.06891688,
- 0.058812816,
- 0.008785837,
- -0.016080279,
- 0.08517403,
- -0.07814158,
- -0.077435054,
- 0.020808736,
- 0.016186161,
- 0.032549612,
- -0.05344129,
- -0.062166847,
- -0.0242584,
- 0.007393759,
- 0.024064584,
- 0.0064619263,
- 0.051204458,
- 0.072843835,
- 0.034658417,
- -0.05477693,
- -0.05941287,
- -0.007262739,
- 0.020149412,
- 0.035835978,
- 0.0056162532,
- 0.010803632,
- -0.052724347,
- 0.010110615,
- -0.0087345,
- -0.06285489,
- 0.038390912,
- -0.013975588,
- 0.0734118,
- 0.090072334,
- -0.07995426,
- -0.016420014,
- 0.044813525,
- -0.06888206,
- -0.033037275,
- -0.015467736,
- 0.01130628,
- 0.036483694,
- 0.0663459,
- -0.054344203,
- 0.008723171,
- 0.012078509,
- -0.038129516,
- 0.006938081,
- 0.051155496,
- 0.07745829,
- -0.122897476,
- 0.01635594,
- 0.04956378,
- 0.031677794,
- -0.03963372,
- 0.0016560612,
- 0.0095810415,
- -0.032620687,
- -0.03396473,
- -0.13327733,
- 0.0072318353,
- -0.010225149,
- 0.038535405,
- -0.09343492,
- -0.04173385,
- 0.06996305,
- -0.026312327,
- -0.14973918,
- 0.13443227,
- 0.03750676,
- 0.052842483,
- 0.045053005,
- 0.018721534,
- 0.05443072,
- 0.017290117,
- -0.03255681,
- 0.046160772,
- -0.046711024,
- -0.030576464,
- -0.018258592,
- -0.048711784,
- 0.033041865,
- -0.003856249,
- 0.05003307,
- -0.05821012,
- -0.00994153,
- 0.0106995255,
- -0.04008794,
- -0.0015539092,
- 0.060838487,
- -0.04559896,
- 0.04924722,
- 0.026119638,
- 0.019796783,
- -0.0016312932,
- 0.05955464,
- -6.527786e-33,
- 0.063555494,
- 0.003072545,
- 0.0290068,
- 0.17338625,
- 0.0029474646,
- 0.027745575,
- -0.095103905,
- -0.031165987,
- 0.026719859,
- -0.010799976,
- 0.023851028,
- 0.02375357,
- -0.031152952,
- 0.049497593,
- -0.025005657,
- 0.10176666,
- -0.079190366,
- -0.0032479328,
- 0.042849813,
- 0.09489888,
- -0.066508934,
- 0.00632239,
- 0.022188535,
- 0.06996212,
- -0.007491268,
- -0.001777037,
- 0.027047161,
- -0.07536194,
- 0.11401931,
- 0.008564227,
- -0.02371391,
- -0.046974454,
- 0.0144310715,
- 0.019899534,
- -0.0046927175,
- 0.0013119543,
- -0.03432107,
- -0.054212432,
- -0.09418897,
- -0.028963951,
- -0.018907014,
- 0.045735538,
- 0.04757043,
- -0.003132595,
- -0.033231355,
- -0.013520351,
- 0.051010653,
- 0.03111525,
- 0.015257217,
- 0.054166727,
- -0.085080594,
- 0.013355202,
- -0.04763934,
- 0.07099156,
- -0.01309272,
- -0.0023823304,
- 0.050339438,
- -0.041624993,
- -0.014171974,
- 0.032421313,
- 0.005414455,
- 0.09128853,
- 0.0045168963,
- -0.018196244,
- -0.015225792,
- -0.04635148,
- 0.038764603,
- 0.014739169,
- 0.052030377,
- 0.0017809072,
- -0.014930553,
- 0.027100598,
- 0.031190928,
- 0.02379928,
- -0.0045879,
- 0.03622444,
- 0.066800386,
- -0.0018508516,
- 0.021243243,
- -0.0575494,
- 0.019077979,
- 0.031474162,
- -0.018456634,
- -0.04083116,
- 0.10387791,
- 0.011981423,
- -0.014923204,
- -0.10519511,
- -0.012293124,
- -0.00042049217,
- -0.09506704,
- 0.058275525,
- 0.042611193,
- -0.025061507,
- -0.094545335,
- 4.010606e-33,
- 0.13226718,
- 0.0053517097,
- -0.03314567,
- -0.09099676,
- -0.031551942,
- -0.033939674,
- -0.071981214,
- 0.12595285,
- -0.08333936,
- 0.052855294,
- 0.001036374,
- 0.021973396,
- 0.104020424,
- 0.013031712,
- 0.040921222,
- 0.018695012,
- 0.114233166,
- 0.024822846,
- 0.014595918,
- 0.00621894,
- -0.011220824,
- -0.035742316,
- -0.03801776,
- 0.011226576,
- -0.051305167,
- 0.007892534,
- 0.06734842,
- 0.0033567564,
- -0.09286571,
- 0.03701943,
- -0.022331072,
- 0.040051647,
- -0.030764744,
- -0.011390678,
- -0.014426033,
- 0.024999708,
- -0.09751172,
- -0.03538673,
- -0.03757043,
- -0.010174254,
- -0.06396341,
- 0.025548752,
- 0.020661479,
- 0.03752242,
- -0.10438308,
- -0.028266912,
- -0.052153755,
- 0.012830027,
- -0.05125152,
- -0.029009243,
- -0.09633578,
- -0.042322997,
- 0.06716196,
- -0.030903742,
- -0.010314011,
- 0.027343867,
- -0.028119028,
- 0.010296558,
- 0.043072425,
- 0.022286164,
- 0.007943,
- 0.056093868,
- 0.040728126,
- 0.09295372,
- 0.016456816,
- -0.053744446,
- 0.00047035623,
- 0.050744157,
- 0.04246857,
- -0.029237023,
- 0.009294763,
- -0.010624897,
- -0.037202932,
- 0.00220195,
- -0.030278567,
- 0.07457478,
- 0.0026277148,
- -0.017591486,
- 0.0028708735,
- 0.03840644,
- 0.0072204536,
- 0.045653794,
- 0.039947055,
- 0.014161398,
- -0.014247232,
- 0.058465447,
- 0.036360227,
- 0.055268615,
- -0.02004829,
- -0.08043532,
- -0.030213723,
- -0.0148566915,
- 0.022293866,
- 0.011908896,
- -0.06907556,
- -1.8805048e-08,
- -0.078408636,
- 0.046699222,
- -0.023894435,
- 0.06347232,
- 0.02395583,
- 0.0014103559,
- -0.090737104,
- -0.06684135,
- -0.080118775,
- 0.0054891296,
- 0.05368204,
- 0.10478211,
- -0.066875115,
- 0.015525915,
- 0.06710851,
- 0.07083251,
- -0.03199485,
- 0.020825442,
- -0.021920865,
- -0.0072890157,
- -0.01058703,
- 0.004174248,
- 0.033155944,
- -0.07901077,
- 0.038750935,
- -0.07521113,
- -0.015731987,
- 0.005987591,
- 0.0051212795,
- -0.061557226,
- 0.04203319,
- 0.09544439,
- -0.04317485,
- 0.014446859,
- -0.10614051,
- -0.028011814,
- 0.01101727,
- 0.069552526,
- 0.0669063,
- -0.0747214,
- -0.078444764,
- 0.042728573,
- -0.034634914,
- -0.106056124,
- -0.0357495,
- 0.05155015,
- 0.068699375,
- -0.049968246,
- 0.015420614,
- -0.06460179,
- -0.07601102,
- 0.026022797,
- 0.07440251,
- -0.0124161495,
- 0.1332999,
- 0.07480527,
- 0.051343314,
- 0.02094546,
- -0.026808253,
- 0.08892536,
- 0.03996125,
- -0.041000355,
- 0.03187991,
- 0.018108707
+ -0.038168654,
+ 0.032873917,
+ -0.0055947267,
+ 0.014366432,
+ -0.040310103,
+ -0.116643615,
+ 0.031721067,
+ 0.0019260457,
+ -0.04255802,
+ 0.029198613,
+ 0.04252229,
+ 0.032184314,
+ 0.029838374,
+ 0.010959321,
+ -0.053805783,
+ -0.05028783,
+ -0.023449864,
+ 0.0107550435,
+ -0.13774979,
+ 0.0039929547,
+ 0.029302042,
+ 0.066712305,
+ -0.015410682,
+ 0.048422653,
+ -0.08814465,
+ -0.012715775,
+ 0.041334823,
+ 0.040851083,
+ -0.050064698,
+ -0.05804616,
+ 0.048728727,
+ 0.06888658,
+ 0.058795262,
+ 0.008804153,
+ -0.016073612,
+ 0.08514259,
+ -0.078146815,
+ -0.07741974,
+ 0.020842256,
+ 0.016201088,
+ 0.032518543,
+ -0.05346469,
+ -0.062197812,
+ -0.024271712,
+ 0.007416788,
+ 0.024103774,
+ 0.006469804,
+ 0.051166162,
+ 0.07284196,
+ 0.034627657,
+ -0.05475476,
+ -0.059386417,
+ -0.0071934434,
+ 0.020163197,
+ 0.035816014,
+ 0.0055927313,
+ 0.010762318,
+ -0.05274177,
+ 0.010083032,
+ -0.008742163,
+ -0.06284565,
+ 0.038426206,
+ -0.013933317,
+ 0.07342759,
+ 0.09004579,
+ -0.07995627,
+ -0.016420787,
+ 0.044767782,
+ -0.06886435,
+ -0.03303916,
+ -0.015482072,
+ 0.011322529,
+ 0.036461752,
+ 0.066346884,
+ -0.05434455,
+ 0.008740993,
+ 0.012066104,
+ -0.038101126,
+ 0.0069316486,
+ 0.051146947,
+ 0.07740579,
+ -0.122950904,
+ 0.016380342,
+ 0.049568996,
+ 0.031634904,
+ -0.039637603,
+ 0.0016715266,
+ 0.009577405,
+ -0.032646418,
+ -0.033988595,
+ -0.13329837,
+ 0.0072566303,
+ -0.010266605,
+ 0.038557075,
+ -0.09338859,
+ -0.041706774,
+ 0.069941126,
+ -0.026323376,
+ -0.14971305,
+ 0.13445398,
+ 0.03748492,
+ 0.052825302,
+ 0.0450506,
+ 0.018712776,
+ 0.05444322,
+ 0.017282845,
+ -0.032480195,
+ 0.04614526,
+ -0.046711974,
+ -0.030566413,
+ -0.01820007,
+ -0.04869831,
+ 0.033051647,
+ -0.0038142777,
+ 0.04999665,
+ -0.058270358,
+ -0.010011706,
+ 0.010643473,
+ -0.040113144,
+ -0.0015507729,
+ 0.060854245,
+ -0.045562096,
+ 0.049257778,
+ 0.02612153,
+ 0.01981428,
+ -0.001660993,
+ 0.059509434,
+ -6.525298e-33,
+ 0.063519135,
+ 0.0030875143,
+ 0.028961418,
+ 0.1733713,
+ 0.0029763067,
+ 0.027727291,
+ -0.0951315,
+ -0.031186627,
+ 0.026689058,
+ -0.010807322,
+ 0.023850724,
+ 0.023777472,
+ -0.031174092,
+ 0.049501278,
+ -0.025049716,
+ 0.10175924,
+ -0.07919064,
+ -0.0032249284,
+ 0.042915843,
+ 0.09483459,
+ -0.06652636,
+ 0.006303593,
+ 0.02220902,
+ 0.06999181,
+ -0.0074810013,
+ -0.0017734945,
+ 0.027008688,
+ -0.07534615,
+ 0.114036545,
+ 0.008552313,
+ -0.023737878,
+ -0.04694563,
+ 0.014472103,
+ 0.019855395,
+ -0.0046694353,
+ 0.0013555645,
+ -0.034298304,
+ -0.054142635,
+ -0.09419824,
+ -0.028909719,
+ -0.018876282,
+ 0.0457315,
+ 0.04761082,
+ -0.0030971593,
+ -0.033264168,
+ -0.013539523,
+ 0.051041685,
+ 0.031110944,
+ 0.015244497,
+ 0.054158635,
+ -0.08499706,
+ 0.013360703,
+ -0.04759633,
+ 0.07101136,
+ -0.0131114535,
+ -0.0023818254,
+ 0.050331973,
+ -0.041642286,
+ -0.01419894,
+ 0.032463223,
+ 0.0053973934,
+ 0.091275506,
+ 0.0044798073,
+ -0.018260129,
+ -0.015278888,
+ -0.046306957,
+ 0.038750377,
+ 0.014729783,
+ 0.05204642,
+ 0.0017938613,
+ -0.014963651,
+ 0.027101943,
+ 0.031203475,
+ 0.023725478,
+ -0.004601222,
+ 0.03617344,
+ 0.06679477,
+ -0.0018401983,
+ 0.021265576,
+ -0.057589985,
+ 0.019155758,
+ 0.031437635,
+ -0.018444614,
+ -0.04085069,
+ 0.10393101,
+ 0.011960795,
+ -0.014898805,
+ -0.10520497,
+ -0.012302656,
+ -0.00043837292,
+ -0.09508398,
+ 0.058318105,
+ 0.042576887,
+ -0.025066672,
+ -0.094555676,
+ 4.0072287e-33,
+ 0.1322281,
+ 0.0053512393,
+ -0.03312536,
+ -0.09096454,
+ -0.031562407,
+ -0.033949774,
+ -0.07205118,
+ 0.1259232,
+ -0.08333555,
+ 0.052797858,
+ 0.001077506,
+ 0.022004265,
+ 0.10402767,
+ 0.013034249,
+ 0.04091762,
+ 0.018705815,
+ 0.11424037,
+ 0.024799824,
+ 0.014582492,
+ 0.006205516,
+ -0.011202356,
+ -0.035756435,
+ -0.03800272,
+ 0.011251353,
+ -0.0512988,
+ 0.007890417,
+ 0.06736164,
+ 0.0033359542,
+ -0.09285096,
+ 0.03704081,
+ -0.022326592,
+ 0.039967872,
+ -0.030748183,
+ -0.011446819,
+ -0.014453254,
+ 0.02498229,
+ -0.097532175,
+ -0.035378877,
+ -0.03757795,
+ -0.010181498,
+ -0.06392041,
+ 0.025538994,
+ 0.02061816,
+ 0.03757256,
+ -0.1043548,
+ -0.028326731,
+ -0.05209465,
+ 0.0128473425,
+ -0.051238894,
+ -0.029034877,
+ -0.09633617,
+ -0.042309195,
+ 0.067165054,
+ -0.030870603,
+ -0.010357507,
+ 0.027381465,
+ -0.028105576,
+ 0.010302046,
+ 0.04306986,
+ 0.022315372,
+ 0.007954779,
+ 0.056068663,
+ 0.04071972,
+ 0.09293905,
+ 0.016536433,
+ -0.053764775,
+ 0.00047211433,
+ 0.050708972,
+ 0.042510226,
+ -0.029195962,
+ 0.009274875,
+ -0.010647389,
+ -0.037209682,
+ 0.002267011,
+ -0.030304702,
+ 0.0745741,
+ 0.0026207205,
+ -0.017582772,
+ 0.0028797672,
+ 0.038404796,
+ 0.00723137,
+ 0.045613218,
+ 0.03998252,
+ 0.014209623,
+ -0.0142997475,
+ 0.05850862,
+ 0.03630791,
+ 0.055294298,
+ -0.020075988,
+ -0.08041808,
+ -0.030250112,
+ -0.014920701,
+ 0.022349516,
+ 0.011911506,
+ -0.06903851,
+ -1.8806734e-08,
+ -0.078480355,
+ 0.046674173,
+ -0.023920896,
+ 0.0634942,
+ 0.02396477,
+ 0.0014517035,
+ -0.090798445,
+ -0.06684978,
+ -0.0801405,
+ 0.005503192,
+ 0.053675175,
+ 0.104841895,
+ -0.066848256,
+ 0.015522683,
+ 0.067097165,
+ 0.070832625,
+ -0.03197915,
+ 0.020843629,
+ -0.0219202,
+ -0.0073016756,
+ -0.010645817,
+ 0.0040983153,
+ 0.03313765,
+ -0.0790081,
+ 0.03878132,
+ -0.075230986,
+ -0.015732396,
+ 0.0060099233,
+ 0.0051297406,
+ -0.061492138,
+ 0.04202211,
+ 0.09544608,
+ -0.04318599,
+ 0.014424486,
+ -0.10617826,
+ -0.027963417,
+ 0.011034413,
+ 0.069576606,
+ 0.06689785,
+ -0.07479674,
+ -0.07851099,
+ 0.042766396,
+ -0.034639932,
+ -0.10607304,
+ -0.03577663,
+ 0.051540814,
+ 0.068673156,
+ -0.049959548,
+ 0.015460458,
+ -0.064520314,
+ -0.076010585,
+ 0.026035817,
+ 0.07440218,
+ -0.012396022,
+ 0.13329679,
+ 0.074770845,
+ 0.05134284,
+ 0.020977058,
+ -0.026776016,
+ 0.08894323,
+ 0.039937407,
+ -0.04102053,
+ 0.03194075,
+ 0.018113315
],
"index": 0,
"object": "embedding"
},
{
"embedding": [
- -0.009823841,
- 0.06685394,
- 0.08489411,
- 0.03813849,
- 0.032225974,
- -0.034307797,
- 0.107310556,
- -0.046902046,
- -0.102643676,
- -0.003702005,
- -0.0023676767,
- 0.012173647,
- -0.046961293,
- 0.08201565,
- 0.04295503,
- -0.027037757,
- 0.0070437216,
- -0.104356326,
- -0.12175826,
- 0.07269557,
- -0.079771765,
- -0.003676955,
- -0.0044014333,
- 0.06784145,
- -0.020959238,
- 0.05777534,
- -0.008483368,
- -0.013391308,
- 0.0052807773,
- -0.09834358,
- -0.13073047,
- 0.008964234,
- -0.057907283,
- -0.05804121,
- -0.05626149,
- -0.042638198,
- 3.184936e-05,
- -0.14460282,
- 0.007979306,
- 0.022538451,
- 0.048148528,
- -0.039077234,
- -0.012783144,
- 0.007688736,
- 0.05792521,
- -0.027782526,
- -0.019818667,
- 0.09386619,
- 0.14314687,
- -0.023420751,
- -0.10621568,
- 0.026846798,
- -0.05543366,
- 0.017867815,
- 0.021250507,
- 0.041602414,
- 0.0033089865,
- 0.016080648,
- 0.083043434,
- -0.014604297,
- 0.027198244,
- 0.014271484,
- -0.0062427525,
- 0.06058171,
- 0.03864093,
- 0.0060196337,
- -0.10089876,
- -0.05285287,
- -0.0797282,
- 0.01671729,
- -0.054698065,
- -0.073024616,
- 0.04547561,
- -0.009560945,
- -0.010386015,
- -0.064177126,
- 0.0011365172,
- -0.036887243,
- 0.06302413,
- -0.0016032788,
- 0.057869848,
- -0.026043506,
- -0.000536635,
- 0.021403369,
- -0.05001242,
- -0.011384805,
- -0.008799393,
- 0.09338713,
- 0.010654576,
- -0.0006147975,
- -0.056140404,
- 0.043459535,
- 0.0037720772,
- 0.027983129,
- 0.020964785,
- -0.038642954,
- 0.019421708,
- 0.023177834,
- -0.051029585,
- 0.13815063,
- 0.022802453,
- 0.13100733,
- 0.042305406,
- 0.012445653,
- 0.022351589,
- 0.014143133,
- -0.09037672,
- 0.07454903,
- -0.062642604,
- -0.08922512,
- 0.005484734,
- 0.03850994,
- -0.03628572,
- -0.009195987,
- 0.09181748,
- -0.012547894,
- 0.026162561,
- 0.08752062,
- -0.010926715,
- 0.09250321,
- 0.02097545,
- 0.052515954,
- 0.028899532,
- 0.039395254,
- -0.010501714,
- 0.077294946,
- 0.0715375,
- -7.66496e-33,
- 0.100804806,
- 0.00073826336,
- 0.057312902,
- 0.117006026,
- -0.060187068,
- -0.02796235,
- -0.041741833,
- -0.018912861,
- 0.050848745,
- -0.06301131,
- 0.036858555,
- -0.045183055,
- -0.005223951,
- 0.0064753974,
- -0.03198189,
- 0.028979877,
- -0.09603434,
- 0.057345662,
- 0.008110953,
- 0.12529288,
- -0.021994175,
- -0.047584984,
- -0.04379391,
- 0.021993084,
- 0.051113907,
- -0.014501653,
- -0.021036316,
- -0.0667254,
- -0.026064333,
- -0.008694687,
- -0.036617454,
- -0.008719971,
- 0.115688674,
- -0.00289865,
- 0.025261829,
- -0.0076816385,
- -0.008632856,
- -0.0036519386,
- -0.04257167,
- -0.037688565,
- 0.03307097,
- -0.024961809,
- 0.05859159,
- -0.06178797,
- -0.04673158,
- -0.027886666,
- -0.035025608,
- 0.055327583,
- -0.002065147,
- -0.022386257,
- -0.10152246,
- 0.029717246,
- -0.06324088,
- -0.0055829133,
- -0.048448645,
- -0.04066708,
- -0.07524254,
- 0.03743904,
- 0.016060878,
- 0.084327556,
- 0.012047858,
- 0.055406,
- 0.009235782,
- -0.07829579,
- -0.105074205,
- -0.023971796,
- -0.017086953,
- -0.018263351,
- 0.041692156,
- -0.00606311,
- 0.012483653,
- -0.035019528,
- 0.024491172,
- 0.06318314,
- 0.065662295,
- 0.052476574,
- 0.038394902,
- -0.07514326,
- -0.012202919,
- -0.0064696297,
- 0.049809776,
- 0.05707129,
- -0.0019637872,
- -0.049091708,
- 0.054853234,
- 0.052796733,
- 0.007638584,
- -0.009890581,
- 0.0022318119,
- 0.022781821,
- -0.06865972,
- 0.06054869,
- 0.070527636,
- -0.04190614,
- -0.024943016,
- 5.210683e-33,
- 0.09748425,
- 0.015037715,
- -0.0950651,
- 0.05163348,
- -0.09946082,
- -0.046801973,
- -0.045799557,
- 0.04598005,
- -0.021040877,
- 0.048971444,
- 0.085892275,
- 0.031846974,
- 0.010494827,
- -0.011657944,
- 0.023827314,
- -0.0036091327,
- 0.05379242,
- 0.0051917112,
- -0.020764181,
- 0.011931169,
- -0.09782392,
- 0.06021868,
- -0.027618488,
- 0.06742346,
- 4.5418237e-05,
- 0.06255733,
- 0.024763351,
- 0.05360233,
- -0.037187718,
- -0.015447758,
- -0.015347547,
- -0.021288762,
- -0.03981676,
- 0.04994158,
- 0.019988623,
- 0.058448106,
- 0.0017628162,
- -0.074512705,
- -0.015785523,
- -0.10013551,
- -0.10497206,
- 0.030029353,
- 0.00386666,
- 0.065692,
- 0.053144414,
- 0.009848025,
- -0.023745444,
- -0.02572956,
- -0.0091416575,
- 0.06447014,
- 0.008398887,
- -0.03277235,
- -0.0017416656,
- 0.017433915,
- 0.02735147,
- -0.003945162,
- -0.07797209,
- -0.061111048,
- -0.018393502,
- 0.019164208,
- -0.10231785,
- 0.0048785545,
- -0.039205246,
- -0.00983978,
- 0.024287809,
- -0.02257733,
- -0.016971176,
- -0.03401973,
- -0.052132465,
- -0.031842116,
- -0.034754753,
- 0.0082540605,
- 0.0013724067,
- -0.06360571,
- -0.028295932,
- 0.050363123,
- 0.023888446,
- 0.005894443,
- -0.0116009535,
- -0.0004876411,
- -0.07163071,
- 0.041449234,
- 0.05440186,
- -0.10820097,
- -0.081358775,
- -0.069281794,
- 0.08610945,
- -0.0035109764,
- 0.031017194,
- 0.08359787,
- -0.028458066,
- 0.008852798,
- -0.027919184,
- 0.04985712,
- 0.011562651,
- -1.5342355e-08,
- 0.054318756,
- 0.045345105,
- -0.07638805,
- 0.052091047,
- -0.01236827,
- 0.060296044,
- -0.004145201,
- -0.017390434,
- -0.014107871,
- -0.01709858,
- 0.075827934,
- 0.007903074,
- -0.06532883,
- -0.04752482,
- 0.038101584,
- -0.050273094,
- 0.02193425,
- 0.068476826,
- -0.037231524,
- -0.049334478,
- 0.057314597,
- 0.008028915,
- -0.042897243,
- 0.09775371,
- 0.05817249,
- 0.052902617,
- 0.024731442,
- 0.03277874,
- -0.0062142154,
- 0.082389385,
- 0.037153333,
- 0.108709686,
- -0.05776975,
- 0.036667187,
- -0.018986559,
- -0.08550582,
- 0.059112605,
- -0.045709446,
- 0.025215724,
- 0.022489667,
- -0.007955196,
- 0.0031373778,
- -0.047831737,
- -0.01862743,
- 0.048644323,
- -0.032836094,
- 0.054563984,
- -0.037403505,
- -0.07471283,
- -0.019280152,
- 0.0060565346,
- 0.04239159,
- 0.06738598,
- 0.04457912,
- 0.03311975,
- 0.033673216,
- 0.0012720197,
- 0.033221062,
- -0.04845177,
- -0.0056105815,
- -0.008513508,
- -0.016865257,
- -0.07558049,
- 0.0035253412
+ -0.009833591,
+ 0.0668779,
+ 0.08488449,
+ 0.038122248,
+ 0.032220595,
+ -0.03433386,
+ 0.10730999,
+ -0.046878964,
+ -0.10266935,
+ -0.00370671,
+ -0.0023427065,
+ 0.0121665625,
+ -0.046939347,
+ 0.08200702,
+ 0.042902183,
+ -0.0269985,
+ 0.0070130927,
+ -0.10432514,
+ -0.12179822,
+ 0.07268025,
+ -0.07978419,
+ -0.0036544742,
+ -0.004423966,
+ 0.06783815,
+ -0.020906046,
+ 0.05779926,
+ -0.008492945,
+ -0.013392021,
+ 0.0052612307,
+ -0.09833074,
+ -0.13072163,
+ 0.0089445235,
+ -0.05787279,
+ -0.05804388,
+ -0.056277692,
+ -0.04266197,
+ 0.00011274022,
+ -0.14460878,
+ 0.007978511,
+ 0.022490304,
+ 0.048143692,
+ -0.039113734,
+ -0.012775274,
+ 0.00774044,
+ 0.057925634,
+ -0.0277638,
+ -0.019801306,
+ 0.09388109,
+ 0.14315501,
+ -0.023440128,
+ -0.10622172,
+ 0.026852824,
+ -0.05544247,
+ 0.017898263,
+ 0.021249173,
+ 0.041583873,
+ 0.0032883594,
+ 0.01606716,
+ 0.08307148,
+ -0.014618173,
+ 0.027187122,
+ 0.014263773,
+ -0.006215441,
+ 0.060580455,
+ 0.038631216,
+ 0.00601958,
+ -0.10086374,
+ -0.052872147,
+ -0.07970713,
+ 0.016736085,
+ -0.054666266,
+ -0.07301758,
+ 0.045461986,
+ -0.009579665,
+ -0.010393855,
+ -0.06414482,
+ 0.0011229888,
+ -0.03685241,
+ 0.06301278,
+ -0.0016175678,
+ 0.057848454,
+ -0.02605763,
+ -0.0005511475,
+ 0.021425176,
+ -0.05001372,
+ -0.011338819,
+ -0.008776912,
+ 0.093425095,
+ 0.010633341,
+ -0.00062553474,
+ -0.056090016,
+ 0.043499533,
+ 0.0037617732,
+ 0.028000852,
+ 0.020929853,
+ -0.03870579,
+ 0.019406682,
+ 0.023135182,
+ -0.050996922,
+ 0.13818857,
+ 0.022762392,
+ 0.13101754,
+ 0.042277776,
+ 0.012446188,
+ 0.02232269,
+ 0.01416872,
+ -0.09036148,
+ 0.07457381,
+ -0.062656924,
+ -0.08921229,
+ 0.005476475,
+ 0.03847988,
+ -0.036277156,
+ -0.009225353,
+ 0.091821924,
+ -0.012585263,
+ 0.026147954,
+ 0.08752217,
+ -0.010917677,
+ 0.09249038,
+ 0.020964727,
+ 0.052522942,
+ 0.02889203,
+ 0.03941557,
+ -0.010532948,
+ 0.077333786,
+ 0.071537115,
+ -7.666136e-33,
+ 0.1007941,
+ 0.0006832776,
+ 0.057265434,
+ 0.11700236,
+ -0.060210142,
+ -0.027968848,
+ -0.041750107,
+ -0.018907221,
+ 0.050820086,
+ -0.06298854,
+ 0.03686846,
+ -0.04519097,
+ -0.005230235,
+ 0.0064626867,
+ -0.032001205,
+ 0.029013716,
+ -0.09601744,
+ 0.057358947,
+ 0.008101205,
+ 0.12529038,
+ -0.021971641,
+ -0.04753891,
+ -0.043775026,
+ 0.022004716,
+ 0.051121656,
+ -0.014482441,
+ -0.021044016,
+ -0.06673008,
+ -0.026052782,
+ -0.008716248,
+ -0.03660495,
+ -0.008708152,
+ 0.115699895,
+ -0.0028488566,
+ 0.025259791,
+ -0.0076865884,
+ -0.00857807,
+ -0.003692314,
+ -0.0425788,
+ -0.03768598,
+ 0.03309143,
+ -0.024962988,
+ 0.05863119,
+ -0.061788555,
+ -0.04672501,
+ -0.02788036,
+ -0.03501338,
+ 0.05530872,
+ -0.0020685238,
+ -0.022395074,
+ -0.10156128,
+ 0.029757096,
+ -0.06324917,
+ -0.0055847103,
+ -0.04842867,
+ -0.0406527,
+ -0.07527831,
+ 0.03743154,
+ 0.016060246,
+ 0.084336765,
+ 0.012059259,
+ 0.05541269,
+ 0.009253656,
+ -0.07830337,
+ -0.10507807,
+ -0.023997093,
+ -0.017076802,
+ -0.018283347,
+ 0.04169534,
+ -0.006048637,
+ 0.012450259,
+ -0.03500919,
+ 0.024494508,
+ 0.06315759,
+ 0.06566752,
+ 0.052477088,
+ 0.038372934,
+ -0.07515921,
+ -0.012239953,
+ -0.006440479,
+ 0.049801994,
+ 0.057076473,
+ -0.0019500607,
+ -0.04908919,
+ 0.05485639,
+ 0.052818075,
+ 0.007574656,
+ -0.009921382,
+ 0.0022724136,
+ 0.022785993,
+ -0.06867227,
+ 0.060549237,
+ 0.070556775,
+ -0.041930214,
+ -0.02491663,
+ 5.211892e-33,
+ 0.09750541,
+ 0.015079458,
+ -0.095042065,
+ 0.0515883,
+ -0.0994903,
+ -0.046793085,
+ -0.04579176,
+ 0.04599562,
+ -0.021065598,
+ 0.04897981,
+ 0.085892305,
+ 0.031818043,
+ 0.010482406,
+ -0.011647838,
+ 0.023812337,
+ -0.0036415062,
+ 0.053783026,
+ 0.005232672,
+ -0.02077592,
+ 0.011894891,
+ -0.097780555,
+ 0.060238954,
+ -0.027633231,
+ 0.06742237,
+ 2.5952173e-05,
+ 0.06254275,
+ 0.024719816,
+ 0.053590305,
+ -0.037180737,
+ -0.015468933,
+ -0.015324857,
+ -0.021314861,
+ -0.039786287,
+ 0.049943436,
+ 0.019945512,
+ 0.05842415,
+ 0.0017712337,
+ -0.07452784,
+ -0.015759895,
+ -0.10015912,
+ -0.104994535,
+ 0.03002228,
+ 0.0038714884,
+ 0.06567684,
+ 0.05313137,
+ 0.009852781,
+ -0.023740485,
+ -0.025747454,
+ -0.009146766,
+ 0.06444407,
+ 0.008365104,
+ -0.032752022,
+ -0.0017309446,
+ 0.017398946,
+ 0.027344245,
+ -0.0039835107,
+ -0.07793314,
+ -0.06111028,
+ -0.018392045,
+ 0.019161185,
+ -0.10229173,
+ 0.004820445,
+ -0.03923746,
+ -0.009809605,
+ 0.02428856,
+ -0.02256144,
+ -0.016944531,
+ -0.03403803,
+ -0.05211972,
+ -0.031824537,
+ -0.034718003,
+ 0.008275027,
+ 0.0013583767,
+ -0.06358826,
+ -0.028270705,
+ 0.050367188,
+ 0.023883171,
+ 0.0058828085,
+ -0.011626739,
+ -0.00044805612,
+ -0.071661964,
+ 0.041463517,
+ 0.054404654,
+ -0.10819901,
+ -0.08137075,
+ -0.06927182,
+ 0.08611682,
+ -0.0035160778,
+ 0.030999359,
+ 0.08360334,
+ -0.028444909,
+ 0.008868503,
+ -0.027930394,
+ 0.04986546,
+ 0.011590262,
+ -1.5343216e-08,
+ 0.054317594,
+ 0.045336407,
+ -0.07639679,
+ 0.052074224,
+ -0.012374757,
+ 0.060316578,
+ -0.0041594645,
+ -0.017367603,
+ -0.014107863,
+ -0.017071113,
+ 0.075814135,
+ 0.0079101855,
+ -0.0653045,
+ -0.047504168,
+ 0.038116574,
+ -0.050272573,
+ 0.021948416,
+ 0.0685364,
+ -0.037221905,
+ -0.04937101,
+ 0.057309754,
+ 0.008049557,
+ -0.042899966,
+ 0.09778022,
+ 0.058175605,
+ 0.05289681,
+ 0.024736015,
+ 0.032797,
+ -0.0062358975,
+ 0.08241506,
+ 0.03714261,
+ 0.10870123,
+ -0.05776473,
+ 0.036651433,
+ -0.018998465,
+ -0.08551218,
+ 0.05913097,
+ -0.04569603,
+ 0.025227055,
+ 0.022481369,
+ -0.007972968,
+ 0.0031193425,
+ -0.047840066,
+ -0.01866631,
+ 0.048634782,
+ -0.032800686,
+ 0.05455027,
+ -0.03739758,
+ -0.07470992,
+ -0.019272048,
+ 0.0060886056,
+ 0.042403262,
+ 0.067405015,
+ 0.044566732,
+ 0.033157814,
+ 0.033654317,
+ 0.0012653307,
+ 0.0331767,
+ -0.04841697,
+ -0.005587956,
+ -0.008498534,
+ -0.016844513,
+ -0.075615294,
+ 0.003522267
],
"index": 1,
"object": "embedding"
},
{
"embedding": [
- 0.033612337,
- 0.010374505,
- -0.01756061,
- 0.029361853,
- -0.009454598,
- -0.037026335,
- -0.02555746,
- 0.0086515825,
- 0.019154208,
- 0.03955405,
- -0.02469497,
- -0.0126976445,
- -0.0065836124,
- 0.043807767,
- -0.036032367,
- -0.056751598,
- 0.005685301,
- -0.048611272,
- -0.01940104,
- 0.051023778,
- 0.06368657,
- 0.04569995,
- -0.025642192,
- 0.02090835,
- 0.023841413,
- -0.011006624,
- -0.06968253,
- 0.008696027,
- -0.0100323185,
- -0.004299733,
- -0.013709692,
- 0.060795236,
- 0.054181676,
- 0.030621745,
- 0.032446172,
- 0.023919526,
- 0.09566865,
- 0.041953687,
- 0.00087092275,
- 0.04335,
- 0.03367777,
- -0.09001533,
- 0.021590438,
- 0.04053571,
- -0.002674088,
- 0.031825043,
- -0.045521177,
- 0.047551177,
- -0.07043583,
- -0.013617987,
- -0.0102603305,
- -0.016518736,
- -0.07214938,
- -0.055422474,
- 0.03316378,
- -0.0076137385,
- 0.050792947,
- -0.04655027,
- 0.064705744,
- 0.08078938,
- -0.053805117,
- -0.013050277,
- -0.023942292,
- 0.0726168,
- 0.07433478,
- 0.050372824,
- -0.03490959,
- -0.101285346,
- -0.016964512,
- -0.054189693,
- 0.005499785,
- 0.006458164,
- 0.055815514,
- 0.048383262,
- 0.040276967,
- 0.0056121964,
- -0.024112493,
- -0.10037388,
- 0.07864023,
- 0.04749725,
- -0.083059065,
- -0.05695486,
- -0.007121432,
- 0.03499301,
- 0.0130494,
- 0.047826655,
- 0.07769031,
- -0.0050768964,
- -0.088448934,
- 0.0034568575,
- -0.023282519,
- 0.045576394,
- -0.042316645,
- -0.024240615,
- 0.017663328,
- -0.024584634,
- -0.032086663,
- -0.009175009,
- -0.060619276,
- 0.0788936,
- -0.007151155,
- -0.0018835695,
- -0.024150992,
- 0.035605535,
- -0.097886965,
- -0.07463594,
- 0.036441684,
- -0.061645452,
- 0.06754617,
- 0.0037501638,
- -0.050999243,
- -0.023512185,
- 0.04400348,
- 0.042692684,
- 0.020495275,
- -0.0098657925,
- -0.10782902,
- 0.041300014,
- 0.029186765,
- 0.045622177,
- 0.0951987,
- -0.020906197,
- 0.00027652894,
- -0.05796104,
- 0.022876726,
- -0.043638688,
- 0.021679614,
- -8.721427e-33,
- -0.0012232207,
- -0.038046468,
- 0.04248091,
- 0.08773161,
- -0.0042147394,
- 0.00010909877,
- -0.06459573,
- 0.061631102,
- -0.0035571777,
- -0.0057670954,
- -0.010751822,
- -0.06539647,
- 0.0026381642,
- 0.006108226,
- 0.07177802,
- 0.099656485,
- -0.028420987,
- 0.0886893,
- -0.06579721,
- 0.0577445,
- -0.057205524,
- 0.036075067,
- -0.02090538,
- -0.09164578,
- -0.07255028,
- -0.075212136,
- -0.006453883,
- 0.010381722,
- -0.0037261078,
- 0.020341685,
- -0.039610952,
- 0.048633367,
- -0.057997692,
- 0.04580804,
- -0.002834594,
- -0.026399026,
- 0.011338722,
- -0.008768234,
- -0.012484398,
- 0.0030163776,
- -0.050530374,
- -0.043636482,
- -0.024315875,
- 0.065459326,
- 0.050444957,
- -0.031544425,
- -0.00075475493,
- -0.04531901,
- 0.058805995,
- 0.0012770096,
- -0.019136755,
- 0.012550491,
- 0.040011447,
- -0.022380024,
- -0.030805111,
- 0.04761777,
- 0.036087062,
- -0.00771528,
- -0.042050246,
- 0.09727571,
- 0.011417657,
- 0.027789006,
- -0.08352716,
- 0.019375375,
- -0.05415718,
- 0.014092975,
- -0.04270275,
- -0.007896535,
- 0.029720219,
- 0.07610263,
- 0.031358883,
- -0.04178186,
- 0.0016060148,
- 0.03870257,
- -0.059810083,
- -0.07050183,
- -0.051603932,
- 0.06843783,
- -0.0037906233,
- -0.012867741,
- 0.035064667,
- -0.112596914,
- 0.053979058,
- -0.11403874,
- -0.033291597,
- -0.011375664,
- -0.022975085,
- -0.0874419,
- 0.0009676586,
- -0.07040301,
- -0.034353334,
- 0.028341567,
- -0.003938582,
- -0.065418504,
- 0.05670526,
- 4.4032913e-33,
- -0.06758047,
- 0.07452212,
- -0.04625966,
- 0.110544346,
- 0.08249691,
- -0.035985246,
- 0.112199076,
- -0.010368401,
- -0.09361668,
- 0.15915231,
- 0.005810317,
- 0.041577023,
- 0.041846495,
- -0.0221648,
- 0.0180787,
- 0.01732049,
- 0.031424496,
- -0.07654498,
- 0.011575445,
- -0.04279533,
- -0.077900656,
- 0.12441581,
- 0.036161043,
- 0.09728094,
- -0.06544197,
- 0.051177975,
- 0.030517569,
- -0.06477891,
- 0.0033884735,
- -0.0065040532,
- 0.002094866,
- 0.0057612373,
- -0.07176532,
- 0.01457261,
- 0.0111329,
- -0.012400559,
- 0.09850194,
- -0.05333344,
- -0.059571583,
- 0.027873877,
- 0.013967755,
- 0.0973726,
- 0.14173166,
- 0.09823832,
- -0.00076127227,
- 0.036324706,
- 0.013391566,
- -0.11345763,
- 0.015459011,
- 0.04547403,
- -0.05844395,
- -0.011545099,
- 0.026310358,
- 0.055226807,
- -0.05014672,
- 0.014071454,
- -0.04505251,
- 0.0055593317,
- 0.017989416,
- 0.01946363,
- -0.08633586,
- 0.08156571,
- -0.012573777,
- 0.03409684,
- -0.017857939,
- -0.031390663,
- -0.08447243,
- 0.07359053,
- 0.03050787,
- 0.014397102,
- 0.085515074,
- -0.0014615763,
- -0.117197014,
- -0.071065396,
- 0.08322675,
- -0.077766545,
- -0.04483503,
- -0.009105399,
- 0.031649765,
- -0.03719005,
- -0.05655446,
- -0.07973028,
- 0.0033281972,
- 0.039855074,
- -0.05885036,
- 0.09728466,
- -0.016143035,
- 0.02778064,
- -0.06544481,
- 0.040895227,
- 0.009707747,
- -0.012031996,
- -0.0087121,
- -0.050623253,
- -0.024199592,
- -1.8976149e-08,
- -0.024199035,
- -0.05503201,
- -0.014488159,
- 0.017767312,
- -0.014441727,
- 0.06777053,
- 0.032016836,
- -0.04272461,
- -0.056400675,
- 0.00891021,
- 0.09656018,
- 0.06953362,
- -0.09056004,
- 0.018509604,
- 0.0636711,
- -0.07154264,
- -0.004792113,
- -0.008434159,
- -0.016066523,
- 0.08377477,
- -0.08183436,
- 0.050272364,
- 0.020495478,
- 0.027959472,
- -0.023466159,
- 0.074599385,
- 0.03680873,
- 0.08727076,
- 0.0132746175,
- 0.027399603,
- 0.06736775,
- 0.039569516,
- -0.044155512,
- -0.051341295,
- -0.013279262,
- 0.06611269,
- 0.0431739,
- -0.036882088,
- 0.02478827,
- 0.0406888,
- -0.1132855,
- 0.027976915,
- 0.0070727277,
- 0.039784174,
- -0.027419532,
- -0.05590226,
- -0.08574367,
- -0.02544574,
- -0.021121135,
- -0.05820989,
- -0.025676778,
- 0.017944483,
- 0.04889649,
- -0.036834445,
- 0.012973257,
- -0.06298454,
- -0.03954017,
- -0.0035980341,
- -0.06945554,
- 0.042370543,
- 0.1125106,
- -0.0015144089,
- 0.08769291,
- -0.041732
+ 0.033608936,
+ 0.010398442,
+ -0.017553993,
+ 0.029364064,
+ -0.009464617,
+ -0.037002508,
+ -0.025546908,
+ 0.008652466,
+ 0.019171866,
+ 0.03954904,
+ -0.024698786,
+ -0.012698567,
+ -0.006575828,
+ 0.043791965,
+ -0.035994604,
+ -0.05671484,
+ 0.0056701135,
+ -0.048562843,
+ -0.019397723,
+ 0.05104105,
+ 0.063669115,
+ 0.045695283,
+ -0.025647452,
+ 0.020920323,
+ 0.023776716,
+ -0.011002659,
+ -0.06972687,
+ 0.008664046,
+ -0.010030623,
+ -0.004339591,
+ -0.013750908,
+ 0.060781404,
+ 0.054188438,
+ 0.030624274,
+ 0.032462284,
+ 0.023917627,
+ 0.09566426,
+ 0.041960694,
+ 0.00087254023,
+ 0.04337981,
+ 0.033683162,
+ -0.08997299,
+ 0.021594081,
+ 0.040572572,
+ -0.002699973,
+ 0.03181515,
+ -0.04552366,
+ 0.047550924,
+ -0.07038101,
+ -0.013632569,
+ -0.010259558,
+ -0.016508883,
+ -0.07213799,
+ -0.055489477,
+ 0.03312745,
+ -0.0075917933,
+ 0.050809033,
+ -0.04651997,
+ 0.064730175,
+ 0.080775,
+ -0.053802576,
+ -0.01303103,
+ -0.023942273,
+ 0.07259772,
+ 0.07427843,
+ 0.050371367,
+ -0.034895457,
+ -0.10131592,
+ -0.01694396,
+ -0.054186717,
+ 0.0054757623,
+ 0.0064777075,
+ 0.055816714,
+ 0.04833513,
+ 0.040297274,
+ 0.005629578,
+ -0.024119677,
+ -0.10035926,
+ 0.07866524,
+ 0.047488276,
+ -0.08309364,
+ -0.056954693,
+ -0.007104401,
+ 0.03495975,
+ 0.013019207,
+ 0.047803633,
+ 0.0777118,
+ -0.00509941,
+ -0.08840243,
+ 0.0034689775,
+ -0.023245867,
+ 0.04557207,
+ -0.04230277,
+ -0.024225675,
+ 0.017693503,
+ -0.024583058,
+ -0.032045294,
+ -0.009174721,
+ -0.06059988,
+ 0.07893847,
+ -0.00714072,
+ -0.0018742199,
+ -0.024142431,
+ 0.03558561,
+ -0.097880565,
+ -0.07468488,
+ 0.036415916,
+ -0.06168905,
+ 0.06755602,
+ 0.0037724776,
+ -0.05098253,
+ -0.023584208,
+ 0.043991886,
+ 0.042738363,
+ 0.020495268,
+ -0.0098619405,
+ -0.107808046,
+ 0.041273866,
+ 0.02920404,
+ 0.04561137,
+ 0.095207445,
+ -0.020896124,
+ 0.00023096669,
+ -0.057968765,
+ 0.022850417,
+ -0.043668177,
+ 0.021688405,
+ -8.720441e-33,
+ -0.0012058292,
+ -0.03802704,
+ 0.042444937,
+ 0.08773871,
+ -0.004220456,
+ 0.00012147395,
+ -0.06457608,
+ 0.061607473,
+ -0.0035593824,
+ -0.0057741986,
+ -0.010743548,
+ -0.065433994,
+ 0.002658555,
+ 0.006107435,
+ 0.07180735,
+ 0.099667646,
+ -0.028398223,
+ 0.08866949,
+ -0.06581663,
+ 0.057735924,
+ -0.057161212,
+ 0.036086526,
+ -0.02094693,
+ -0.091624826,
+ -0.07255717,
+ -0.07521124,
+ -0.0064620934,
+ 0.010381977,
+ -0.0037112501,
+ 0.020337056,
+ -0.0396202,
+ 0.04863623,
+ -0.057977367,
+ 0.045799762,
+ -0.0028102288,
+ -0.026413642,
+ 0.011332779,
+ -0.008787543,
+ -0.01246847,
+ 0.003016415,
+ -0.050528,
+ -0.043582138,
+ -0.024329135,
+ 0.06542502,
+ 0.050448198,
+ -0.031531323,
+ -0.0007779434,
+ -0.04532696,
+ 0.058871463,
+ 0.0012682271,
+ -0.019152224,
+ 0.01258753,
+ 0.03999562,
+ -0.022376174,
+ -0.030803563,
+ 0.04760751,
+ 0.036079545,
+ -0.0076535675,
+ -0.04203372,
+ 0.097275354,
+ 0.011409953,
+ 0.027754916,
+ -0.0835048,
+ 0.019380422,
+ -0.05416042,
+ 0.014054438,
+ -0.04266347,
+ -0.007908375,
+ 0.029723784,
+ 0.0761083,
+ 0.03139675,
+ -0.041797075,
+ 0.0016033188,
+ 0.038726415,
+ -0.059795942,
+ -0.07054141,
+ -0.05157118,
+ 0.0684149,
+ -0.003766908,
+ -0.012878277,
+ 0.035064787,
+ -0.11262972,
+ 0.053968824,
+ -0.1140537,
+ -0.033282436,
+ -0.011386638,
+ -0.022939742,
+ -0.08745513,
+ 0.0009942602,
+ -0.07038481,
+ -0.034342457,
+ 0.028354177,
+ -0.003912724,
+ -0.0654399,
+ 0.056719452,
+ 4.401956e-33,
+ -0.06759265,
+ 0.07454906,
+ -0.046297893,
+ 0.11055107,
+ 0.08249596,
+ -0.035986293,
+ 0.11225011,
+ -0.010407374,
+ -0.09363792,
+ 0.15916187,
+ 0.0057810647,
+ 0.041591797,
+ 0.041856647,
+ -0.022185486,
+ 0.018102126,
+ 0.017321726,
+ 0.031456053,
+ -0.076545484,
+ 0.011582533,
+ -0.04284016,
+ -0.07789234,
+ 0.12440625,
+ 0.03617526,
+ 0.09730373,
+ -0.06544067,
+ 0.051156454,
+ 0.030499168,
+ -0.06475215,
+ 0.003401952,
+ -0.006514968,
+ 0.002070544,
+ 0.005759038,
+ -0.07172358,
+ 0.0145481,
+ 0.011155189,
+ -0.012380945,
+ 0.098492086,
+ -0.053324275,
+ -0.05958665,
+ 0.027893873,
+ 0.01397341,
+ 0.09733979,
+ 0.14172351,
+ 0.09822425,
+ -0.000753543,
+ 0.036323734,
+ 0.013357258,
+ -0.11347022,
+ 0.01546052,
+ 0.045483384,
+ -0.05844928,
+ -0.011548025,
+ 0.026313214,
+ 0.055244267,
+ -0.050127964,
+ 0.014079803,
+ -0.04502139,
+ 0.005556844,
+ 0.017963082,
+ 0.01945956,
+ -0.08633155,
+ 0.08159404,
+ -0.012574804,
+ 0.034080163,
+ -0.017839924,
+ -0.031354588,
+ -0.084478684,
+ 0.073620565,
+ 0.030523231,
+ 0.014402138,
+ 0.08548794,
+ -0.0014136349,
+ -0.117235936,
+ -0.071074195,
+ 0.083228014,
+ -0.07779257,
+ -0.044802953,
+ -0.009106513,
+ 0.0316612,
+ -0.03717584,
+ -0.05652208,
+ -0.07973565,
+ 0.003353578,
+ 0.03982252,
+ -0.05883056,
+ 0.097288825,
+ -0.01612578,
+ 0.0277682,
+ -0.06547234,
+ 0.040883925,
+ 0.009703006,
+ -0.012041616,
+ -0.008719466,
+ -0.05062296,
+ -0.024210127,
+ -1.8977037e-08,
+ -0.024204005,
+ -0.055027,
+ -0.014531686,
+ 0.017793229,
+ -0.014444479,
+ 0.06776621,
+ 0.032021433,
+ -0.04271159,
+ -0.056421917,
+ 0.008902811,
+ 0.0965939,
+ 0.069501095,
+ -0.09060633,
+ 0.018546907,
+ 0.06365827,
+ -0.0715206,
+ -0.0047898116,
+ -0.008457558,
+ -0.01603862,
+ 0.083756834,
+ -0.081861764,
+ 0.050247736,
+ 0.020439949,
+ 0.027903674,
+ -0.02344807,
+ 0.074611686,
+ 0.036804173,
+ 0.08724397,
+ 0.013292644,
+ 0.02741063,
+ 0.0673842,
+ 0.039584856,
+ -0.044136506,
+ -0.051336076,
+ -0.013291427,
+ 0.06607191,
+ 0.043135997,
+ -0.036887288,
+ 0.024783924,
+ 0.040656343,
+ -0.11329909,
+ 0.027977955,
+ 0.0070782495,
+ 0.039789386,
+ -0.027414937,
+ -0.055913515,
+ -0.085740864,
+ -0.025473714,
+ -0.021161858,
+ -0.05823863,
+ -0.025728453,
+ 0.017994676,
+ 0.04891479,
+ -0.03684745,
+ 0.012969448,
+ -0.063004315,
+ -0.039539963,
+ -0.0036127788,
+ -0.069469534,
+ 0.042392787,
+ 0.11249585,
+ -0.0015041318,
+ 0.087654695,
+ -0.041728426
],
"index": 2,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/80e4404d8987.json b/tests/integration/recordings/responses/80e4404d8987.json
index 09d510916..226b6648d 100644
--- a/tests/integration/recordings/responses/80e4404d8987.json
+++ b/tests/integration/recordings/responses/80e4404d8987.json
@@ -22,7 +22,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:10.76700718Z",
+ "created_at": "2025-10-02T02:54:51.50254Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -40,7 +40,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:10.956949035Z",
+ "created_at": "2025-10-02T02:54:51.549521Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -58,7 +58,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:11.147886127Z",
+ "created_at": "2025-10-02T02:54:51.594384Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -76,7 +76,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:11.337832912Z",
+ "created_at": "2025-10-02T02:54:51.637769Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -94,7 +94,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:11.524017554Z",
+ "created_at": "2025-10-02T02:54:51.684099Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -112,7 +112,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:11.712703934Z",
+ "created_at": "2025-10-02T02:54:51.730912Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -130,7 +130,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:11.903877596Z",
+ "created_at": "2025-10-02T02:54:51.777299Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -148,7 +148,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:12.095535165Z",
+ "created_at": "2025-10-02T02:54:51.823309Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -166,7 +166,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:12.291614477Z",
+ "created_at": "2025-10-02T02:54:51.868924Z",
"done": false,
"done_reason": null,
"total_duration": null,
@@ -184,15 +184,15 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
- "created_at": "2025-10-01T01:33:12.483844314Z",
+ "created_at": "2025-10-02T02:54:51.915105Z",
"done": true,
"done_reason": "stop",
- "total_duration": 4303509972,
- "load_duration": 44748689,
+ "total_duration": 5098012833,
+ "load_duration": 4289621791,
"prompt_eval_count": 31,
- "prompt_eval_duration": 2539513749,
+ "prompt_eval_duration": 393000541,
"eval_count": 10,
- "eval_duration": 1718623697,
+ "eval_duration": 414080875,
"response": "",
"thinking": null,
"context": null
diff --git a/tests/integration/recordings/responses/84432044194a.json b/tests/integration/recordings/responses/84432044194a.json
new file mode 100644
index 000000000..373652c28
--- /dev/null
+++ b/tests/integration/recordings/responses/84432044194a.json
@@ -0,0 +1,414 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_s1g1se8b",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_s1g1se8b",
+ "content": "-100"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": " Poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441156,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441157,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": "100",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441157,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": "\u00b0C",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441157,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441157,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-157",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441157,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/8486e5b1c6db.json b/tests/integration/recordings/responses/8486e5b1c6db.json
new file mode 100644
index 000000000..6eae12ff0
--- /dev/null
+++ b/tests/integration/recordings/responses/8486e5b1c6db.json
@@ -0,0 +1,276 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point_with_metadata\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point_with_metadata(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.185623Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "The",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.227358Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " boiling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.268854Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.311161Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " of",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.353205Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.394667Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.43604Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.477482Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " in",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.519193Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Celsius",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.561068Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " is",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.602574Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " -",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.644332Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "100",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.686134Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ".",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:15.727722Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 730418375,
+ "load_duration": 118920875,
+ "prompt_eval_count": 401,
+ "prompt_eval_duration": 67995917,
+ "eval_count": 14,
+ "eval_duration": 542856417,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/84fc473e7b29.json b/tests/integration/recordings/responses/84fc473e7b29.json
index f01f11759..867f6208a 100644
--- a/tests/integration/recordings/responses/84fc473e7b29.json
+++ b/tests/integration/recordings/responses/84fc473e7b29.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-165",
+ "id": "chatcmpl-400",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282579,
+ "created": 1759441673,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/87577729d812.json b/tests/integration/recordings/responses/87577729d812.json
index 9b8699084..372b41369 100644
--- a/tests/integration/recordings/responses/87577729d812.json
+++ b/tests/integration/recordings/responses/87577729d812.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-609",
+ "id": "chatcmpl-192",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282388,
+ "created": 1759437810,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/8965c0df9071.json b/tests/integration/recordings/responses/8965c0df9071.json
new file mode 100644
index 000000000..66926eb11
--- /dev/null
+++ b/tests/integration/recordings/responses/8965c0df9071.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant Always respond with tool calls no matter what. "
+ },
+ {
+ "role": "user",
+ "content": "Get the boiling point of polyjuice with a tool call."
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-964",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_v7gdtg8p",
+ "function": {
+ "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441159,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-964",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441159,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/8baad1435f9c.json b/tests/integration/recordings/responses/8baad1435f9c.json
index 2a8338816..ccc118a38 100644
--- a/tests/integration/recordings/responses/8baad1435f9c.json
+++ b/tests/integration/recordings/responses/8baad1435f9c.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-469",
+ "id": "chatcmpl-222",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245125,
+ "created": 1759437799,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/8ce928ad0b85.json b/tests/integration/recordings/responses/8ce928ad0b85.json
index e15dad63e..4fac48e7c 100644
--- a/tests/integration/recordings/responses/8ce928ad0b85.json
+++ b/tests/integration/recordings/responses/8ce928ad0b85.json
@@ -19,390 +19,390 @@
"data": [
{
"embedding": [
- 0.043770123,
- 0.021501394,
- -0.081300564,
- 0.010615138,
- -0.07908651,
- -0.03219175,
- 0.13090447,
- 0.042329222,
- -0.11600146,
- -0.07588096,
- 0.041826088,
- -0.080617175,
- 0.038125783,
- -0.01069657,
- 0.01577377,
- -0.04196888,
- 0.043099895,
- -0.033355612,
- 0.013571747,
- -0.0103924,
- 0.015561896,
- -0.03786113,
- -0.050319925,
- -0.02566629,
- -0.047868017,
- -0.08717805,
- 0.01685358,
- -0.03676223,
- 0.0063788705,
- 0.020863743,
- 0.11264443,
- -0.0021451844,
- -0.07911777,
- 0.038758967,
- 0.115321144,
- -0.019753717,
- 0.0067159277,
- -0.02115779,
- -0.0144774495,
- -0.0027154125,
- -0.034384295,
- -0.052576542,
- -0.030578543,
- 0.04745372,
- -0.024294367,
- 0.01091144,
- -0.03947583,
- 0.07183755,
- -0.020715859,
- 0.018965777,
- 0.04292474,
- -0.007755194,
- 0.0025708016,
- -0.058263537,
- 0.0117485095,
- -0.022703577,
- 0.001755438,
- -0.012628832,
- 0.030728007,
- 0.017719304,
- -0.061525322,
- -0.036568273,
- 0.025831668,
- 0.025376469,
- 0.012137967,
- 0.009102949,
- -0.027313529,
- -0.093379095,
- 0.0052120173,
- 0.0074658697,
- -0.07538,
- 0.010161349,
- -0.028439516,
- 0.03026334,
- 0.0036700817,
- -0.022599109,
- -0.037862476,
- -0.08384314,
- -0.0124443015,
- -0.048889726,
- 0.029131662,
- -0.044443335,
- -0.07518736,
- -0.020938978,
- 0.063386515,
- 0.16294138,
- 0.060580015,
- -0.01281573,
- -0.031040885,
- 0.018372353,
- 0.11225789,
- 0.072922915,
- -0.06272038,
- -0.031792488,
- -0.017476005,
- 0.04846264,
- -0.04116229,
- -0.041834168,
- -0.059919056,
- 0.15907861,
- -0.027786179,
- -0.012492541,
- 0.05599519,
- -0.019895995,
- 0.022076221,
- 0.006363836,
- 0.046413723,
- -0.0731325,
- 0.03326452,
- 0.059475966,
- -0.033314705,
- 0.030761855,
- 0.00819013,
- -0.020254606,
- 0.05658313,
- -0.08153619,
- 0.023402533,
- 0.0060753864,
- -0.07993489,
- 0.013990512,
- 0.052254565,
- 0.027170746,
- -0.049271967,
- 0.02814688,
- 0.019500777,
- 0.054206643,
- 0.082691684,
- -1.8817448e-33,
- 0.013630832,
- -0.010863344,
- 0.015899567,
- 0.06938339,
- -0.05113185,
- 0.08995833,
- 0.04450505,
- 0.08101549,
- 0.018903807,
- -0.020960161,
- -0.017933648,
- -0.02174221,
- 0.010988686,
- 0.015100026,
- 0.017031211,
- 0.09433042,
- 0.003454907,
- 0.010199729,
- -0.0446973,
- 0.0018167854,
- 0.015817188,
- -0.06576281,
- -0.004943305,
- 0.004393494,
- -0.019598262,
- -0.092797264,
- -0.025917865,
- 0.04409669,
- 0.054165967,
- -0.007365383,
- -0.021470547,
- -0.03683317,
- -0.091507494,
- 0.08402351,
- -0.01809901,
- 0.0038072586,
- 0.020236026,
- 0.0439697,
- -0.077322714,
- 0.0057473024,
- -0.054513566,
- -0.024854423,
- 0.075270385,
- 0.034554463,
- -0.08118007,
- -0.12208905,
- -0.0052893,
- 0.0078005046,
- 0.05028763,
- 0.015558154,
- -0.056349996,
- 0.0398076,
- 0.012997719,
- -0.040145177,
- 0.014409028,
- -0.033200737,
- -0.008437484,
- -0.037582297,
- -0.019651853,
- 0.017285295,
- -0.008976723,
- -0.0018494898,
- -0.0030671947,
- 0.03046138,
- -0.051143825,
- -0.08688155,
- -0.018344227,
- -0.113307714,
- 0.073259674,
- 0.04602224,
- 0.012651309,
- -0.063435435,
- -0.028471926,
- 0.020155901,
- -0.078830436,
- -0.00069818215,
- -0.03156303,
- 0.123062745,
- 0.0042949035,
- -0.026413191,
- 0.07838535,
- -0.07747411,
- -0.02126005,
- 0.048919026,
- 0.02919413,
- -0.009296978,
- -0.030687347,
- -0.041037664,
- -0.038565576,
- -0.08043238,
- 0.023225678,
- 0.041928973,
- -0.05812511,
- 0.058555346,
- 0.07633673,
- 4.4510456e-34,
- -0.019582625,
- 0.040237214,
- 0.01455587,
- 0.034353998,
- 0.043911777,
- -0.023234777,
- 0.0677493,
- -0.030089214,
- -0.09076478,
- -0.019257858,
- -0.02767876,
- -0.00065146026,
- 0.0043030144,
- 0.05363546,
- 0.04073387,
- 0.03255476,
- -0.10712685,
- -0.050083157,
- -0.016644027,
- -0.0077649173,
- -0.11153465,
- 0.07478277,
- -0.015999233,
- -0.050547555,
- -0.113217294,
- -0.006174145,
- 0.050873067,
- -0.030284155,
- 0.04314861,
- 0.033020362,
- 0.023671353,
- 0.04654029,
- -0.03415647,
- 0.03614603,
- 0.023047049,
- -0.02677317,
- 0.063607745,
- 0.09978129,
- 0.03527302,
- 0.15538219,
- 0.08349002,
- 0.10931568,
- 0.04684532,
- -0.010147538,
- -0.03256112,
- 0.12924333,
- 0.031221064,
- -0.099673584,
- 0.010860566,
- 0.02326085,
- -0.011916549,
- 0.010135849,
- 0.06884636,
- 0.009350001,
- -0.0226591,
- -0.04280281,
- -0.04821317,
- -0.08508304,
- 0.051028382,
- 0.045148462,
- -0.03566162,
- 0.06547104,
- 0.048883036,
- 0.03793435,
- -0.1407055,
- -0.06711337,
- 0.009881868,
- -0.0049659596,
- -0.044289522,
- 0.0039236215,
- -0.02692826,
- -0.066134326,
- 0.04076233,
- -0.05222117,
- 0.060488354,
- -0.04113724,
- -0.04314174,
- -0.025147837,
- 0.085597694,
- -0.044939328,
- 0.06395307,
- -0.024218159,
- -0.050523587,
- -0.0020718095,
- -0.07894165,
- 0.0026805927,
- 0.020709056,
- 0.1026727,
- -0.012374822,
- 0.056179732,
- 0.06552235,
- 0.030915475,
- -0.077197015,
- -0.061245024,
- -0.016111895,
- -1.3512232e-08,
- -0.05040501,
- -0.033646606,
- 0.04670903,
- 0.047397695,
- -0.044165645,
- 0.046301767,
- -0.006073457,
- -0.053902794,
- 0.013089125,
- 0.050438043,
- -0.009894958,
- -0.0041677835,
- 0.0723306,
- 0.021069802,
- 0.02670403,
- -0.074845195,
- -0.026750853,
- 0.052738186,
- -0.03469103,
- 0.039813705,
- -0.01640883,
- 0.045899663,
- -0.0224731,
- 0.02387658,
- 0.049145795,
- 0.09110705,
- -0.0025007618,
- 0.04937552,
- -0.03864697,
- 0.020868128,
- 0.07605537,
- 0.08488945,
- -0.05197299,
- -0.06879239,
- -0.06136516,
- 0.077237174,
- -0.06451729,
- 0.04453416,
- 0.008209786,
- 0.015886698,
- -0.04280691,
- 0.005315579,
- 0.0034463098,
- 0.0031776188,
- -0.013040836,
- -0.091359615,
- 0.0642767,
- -0.054965723,
- 0.0007161393,
- -0.06260912,
- -0.03496602,
- -0.029944083,
- 0.04422821,
- 0.017855663,
- -0.027972128,
- -0.03656317,
- 0.02111413,
- 0.060607255,
- -0.031320468,
- -0.014338154,
- 0.034649797,
- 0.052279983,
- -0.036579564,
- 0.028179456
+ 0.043779343,
+ 0.021533398,
+ -0.081306435,
+ 0.010584965,
+ -0.079082854,
+ -0.03219143,
+ 0.13092613,
+ 0.04234389,
+ -0.11600539,
+ -0.07588513,
+ 0.04182356,
+ -0.08061255,
+ 0.038127176,
+ -0.010701234,
+ 0.015768763,
+ -0.04193689,
+ 0.04310592,
+ -0.033361685,
+ 0.013566423,
+ -0.010392366,
+ 0.015551022,
+ -0.037858423,
+ -0.050305344,
+ -0.025666261,
+ -0.047879875,
+ -0.087179765,
+ 0.016856788,
+ -0.036765736,
+ 0.006393739,
+ 0.020844297,
+ 0.11262393,
+ -0.002143682,
+ -0.07910913,
+ 0.038748607,
+ 0.11532516,
+ -0.019759571,
+ 0.0066967797,
+ -0.021164352,
+ -0.014471563,
+ -0.0027048697,
+ -0.034388524,
+ -0.052571636,
+ -0.030607725,
+ 0.04747725,
+ -0.02431059,
+ 0.0109337615,
+ -0.03946421,
+ 0.071846664,
+ -0.020690937,
+ 0.01898796,
+ 0.042931512,
+ -0.0077551426,
+ 0.0025911122,
+ -0.058268107,
+ 0.0117475465,
+ -0.022701943,
+ 0.0017815019,
+ -0.012612941,
+ 0.030724185,
+ 0.017728312,
+ -0.06155491,
+ -0.03656162,
+ 0.02583153,
+ 0.02537894,
+ 0.012139213,
+ 0.009105951,
+ -0.027318193,
+ -0.093389414,
+ 0.005184693,
+ 0.007488449,
+ -0.07540277,
+ 0.010159999,
+ -0.028444426,
+ 0.030260745,
+ 0.0036438918,
+ -0.022627153,
+ -0.037846327,
+ -0.08381657,
+ -0.012445195,
+ -0.048908208,
+ 0.029149827,
+ -0.044437535,
+ -0.07520237,
+ -0.020924438,
+ 0.06342514,
+ 0.1629199,
+ 0.060563333,
+ -0.012817673,
+ -0.031030292,
+ 0.018368995,
+ 0.11223112,
+ 0.07292473,
+ -0.062686674,
+ -0.031803295,
+ -0.017489262,
+ 0.048433464,
+ -0.041148387,
+ -0.04183779,
+ -0.05994369,
+ 0.15909556,
+ -0.027785666,
+ -0.012455991,
+ 0.056005318,
+ -0.019891974,
+ 0.022063067,
+ 0.006342065,
+ 0.0464118,
+ -0.07311654,
+ 0.033282198,
+ 0.05949105,
+ -0.033307947,
+ 0.030738499,
+ 0.008186239,
+ -0.020268966,
+ 0.056593496,
+ -0.081526734,
+ 0.023390312,
+ 0.0060836566,
+ -0.07992586,
+ 0.013986445,
+ 0.052250065,
+ 0.027186505,
+ -0.049284942,
+ 0.028148174,
+ 0.019493744,
+ 0.05418436,
+ 0.0827222,
+ -1.8825437e-33,
+ 0.01360945,
+ -0.010870715,
+ 0.015887791,
+ 0.069373555,
+ -0.051129147,
+ 0.08999179,
+ 0.044494778,
+ 0.08100757,
+ 0.018944906,
+ -0.020974122,
+ -0.017938385,
+ -0.021756735,
+ 0.010972489,
+ 0.015099965,
+ 0.017018452,
+ 0.094338946,
+ 0.0034407445,
+ 0.010244923,
+ -0.044709302,
+ 0.0018059182,
+ 0.015817573,
+ -0.065777056,
+ -0.004948138,
+ 0.0044092103,
+ -0.019589791,
+ -0.092789896,
+ -0.025898295,
+ 0.044104066,
+ 0.0541385,
+ -0.007362511,
+ -0.021487307,
+ -0.036836285,
+ -0.09148704,
+ 0.084001675,
+ -0.018094191,
+ 0.003797567,
+ 0.020257449,
+ 0.04394643,
+ -0.0772898,
+ 0.0057312953,
+ -0.054519102,
+ -0.024835315,
+ 0.0753162,
+ 0.034552757,
+ -0.081203006,
+ -0.12210961,
+ -0.0053012627,
+ 0.00780717,
+ 0.050265096,
+ 0.015569535,
+ -0.056362487,
+ 0.039800324,
+ 0.013022089,
+ -0.04015537,
+ 0.014401654,
+ -0.033209093,
+ -0.008451782,
+ -0.037590392,
+ -0.01965779,
+ 0.01730637,
+ -0.00896531,
+ -0.0018413392,
+ -0.0030382746,
+ 0.030460354,
+ -0.05112036,
+ -0.086875,
+ -0.018338922,
+ -0.11328767,
+ 0.07325826,
+ 0.046035297,
+ 0.012633494,
+ -0.06343216,
+ -0.028439038,
+ 0.020128354,
+ -0.07883383,
+ -0.00069870794,
+ -0.03155447,
+ 0.12306934,
+ 0.004300722,
+ -0.026421167,
+ 0.078361824,
+ -0.077461444,
+ -0.021267027,
+ 0.048929654,
+ 0.02919381,
+ -0.0092880055,
+ -0.030666346,
+ -0.04102384,
+ -0.03860138,
+ -0.08042292,
+ 0.023227168,
+ 0.04191858,
+ -0.058156747,
+ 0.0585743,
+ 0.076342255,
+ 4.465569e-34,
+ -0.019599343,
+ 0.040230304,
+ 0.01455632,
+ 0.034345042,
+ 0.04392999,
+ -0.023241352,
+ 0.067749046,
+ -0.03010354,
+ -0.09075954,
+ -0.019227842,
+ -0.027724287,
+ -0.00062344945,
+ 0.0042892746,
+ 0.053643614,
+ 0.04075099,
+ 0.032581333,
+ -0.107116826,
+ -0.0500636,
+ -0.016655827,
+ -0.007782394,
+ -0.111523,
+ 0.07476429,
+ -0.016019335,
+ -0.050536986,
+ -0.11320647,
+ -0.0061384854,
+ 0.050886273,
+ -0.030283457,
+ 0.04318923,
+ 0.03301474,
+ 0.02362771,
+ 0.046507858,
+ -0.03416386,
+ 0.036145207,
+ 0.023037339,
+ -0.026803765,
+ 0.06361122,
+ 0.09975251,
+ 0.035269737,
+ 0.1554014,
+ 0.083479255,
+ 0.10931981,
+ 0.046847064,
+ -0.010136355,
+ -0.032541983,
+ 0.12926093,
+ 0.031193413,
+ -0.09971323,
+ 0.010830718,
+ 0.02325219,
+ -0.011917061,
+ 0.010155018,
+ 0.06883269,
+ 0.009340846,
+ -0.022698723,
+ -0.042815465,
+ -0.048211087,
+ -0.085067384,
+ 0.05105234,
+ 0.045155898,
+ -0.03564869,
+ 0.06549556,
+ 0.048875004,
+ 0.037915554,
+ -0.14071068,
+ -0.067095764,
+ 0.009898252,
+ -0.0049653547,
+ -0.044304688,
+ 0.0039006064,
+ -0.026903173,
+ -0.066124685,
+ 0.040738244,
+ -0.052228633,
+ 0.060485654,
+ -0.041119356,
+ -0.04312945,
+ -0.025152665,
+ 0.08556276,
+ -0.044942576,
+ 0.06393979,
+ -0.024227533,
+ -0.05052092,
+ -0.0020624825,
+ -0.078943975,
+ 0.0026753,
+ 0.02068896,
+ 0.102683865,
+ -0.01237572,
+ 0.056172684,
+ 0.06552171,
+ 0.030940128,
+ -0.07721113,
+ -0.061241012,
+ -0.016143149,
+ -1.3511957e-08,
+ -0.050416306,
+ -0.033628013,
+ 0.046722032,
+ 0.04744138,
+ -0.04411888,
+ 0.04631675,
+ -0.0060847937,
+ -0.053873356,
+ 0.013075445,
+ 0.050437532,
+ -0.009895477,
+ -0.0041795173,
+ 0.07229928,
+ 0.021081135,
+ 0.02672776,
+ -0.07482113,
+ -0.026757998,
+ 0.052755926,
+ -0.034690056,
+ 0.039811596,
+ -0.016370349,
+ 0.045900222,
+ -0.02250936,
+ 0.023861,
+ 0.04912799,
+ 0.09111738,
+ -0.0024878879,
+ 0.049395334,
+ -0.03861115,
+ 0.020867983,
+ 0.076049894,
+ 0.084881924,
+ -0.051956687,
+ -0.06878504,
+ -0.061384037,
+ 0.077220954,
+ -0.06454818,
+ 0.044513144,
+ 0.008181126,
+ 0.015890416,
+ -0.04280811,
+ 0.005317184,
+ 0.0034429359,
+ 0.0031937633,
+ -0.013058055,
+ -0.09134677,
+ 0.06425565,
+ -0.054977305,
+ 0.0007087448,
+ -0.06258866,
+ -0.034974415,
+ -0.029966963,
+ 0.044276785,
+ 0.017868131,
+ -0.027976807,
+ -0.036579583,
+ 0.021142753,
+ 0.06057356,
+ -0.03133335,
+ -0.014331035,
+ 0.034653842,
+ 0.052315667,
+ -0.036585484,
+ 0.028209662
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/8d035e153b6f.json b/tests/integration/recordings/responses/8d035e153b6f.json
index 18f3ee3cd..6c08b1c56 100644
--- a/tests/integration/recordings/responses/8d035e153b6f.json
+++ b/tests/integration/recordings/responses/8d035e153b6f.json
@@ -20,7 +20,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-708",
+ "id": "chatcmpl-155",
"choices": [
{
"finish_reason": "stop",
@@ -37,7 +37,7 @@
}
}
],
- "created": 1759012142,
+ "created": 1759437855,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/8deded211f21.json b/tests/integration/recordings/responses/8deded211f21.json
new file mode 100644
index 000000000..8cb3e75af
--- /dev/null
+++ b/tests/integration/recordings/responses/8deded211f21.json
@@ -0,0 +1,743 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"book_flight\",\n \"description\": \"\n Book a flight with passenger and payment information.\n\n This tool uses JSON Schema $ref and $defs for type reuse.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"flight\", \"passengers\", \"payment\"],\n \"properties\": {\n \"flight\": {\n \"type\": \"object\",\n \"description\": \"\"\n },\n \"passengers\": {\n \"type\": \"array\",\n \"description\": \"\"\n },\n \"payment\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"process_order\",\n \"description\": \"\n Process an order with nested address information.\n\n Uses nested objects and $ref.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"order_data\"],\n \"properties\": {\n \"order_data\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"flexible_contact\",\n \"description\": \"\n Accept flexible contact (email or phone).\n\n Uses anyOf schema.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"contact_info\"],\n \"properties\": {\n \"contact_info\": {\n \"type\": \"string\",\n \"description\": \"\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant that can process orders and book flights.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nProcess an order with 2 widgets going to 123 Main St, San Francisco<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.457795Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.499711Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "process",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.544576Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.588521Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.633501Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_data",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.677395Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "={\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.720407Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "order",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.763935Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_id",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.807169Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.851019Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " ",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.893637Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "1",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.935864Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:19.978334Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.020617Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "customer",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.063212Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.106093Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.149989Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.192674Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "John",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.236337Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Doe",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.278777Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.320886Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.363891Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "address",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.40745Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.451859Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " {\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.494751Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "street",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.536928Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.581229Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.623455Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "123",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.665328Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Main",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.707445Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " St",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.749803Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.792527Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.835252Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "city",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.878606Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.921646Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:20.963436Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "San",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.012147Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Francisco",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.063248Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\"}}",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.10591Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.149804Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 3544551625,
+ "load_duration": 122599250,
+ "prompt_eval_count": 556,
+ "prompt_eval_duration": 1727890958,
+ "eval_count": 40,
+ "eval_duration": 1693076542,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/8f000a878ccd.json b/tests/integration/recordings/responses/8f000a878ccd.json
index dcca8d1b2..351804652 100644
--- a/tests/integration/recordings/responses/8f000a878ccd.json
+++ b/tests/integration/recordings/responses/8f000a878ccd.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-422",
+ "id": "chatcmpl-988",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759368373,
+ "created": 1759437811,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/920c0495cde6.json b/tests/integration/recordings/responses/920c0495cde6.json
index 09b967cff..dc433ce46 100644
--- a/tests/integration/recordings/responses/920c0495cde6.json
+++ b/tests/integration/recordings/responses/920c0495cde6.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-992",
+ "id": "chatcmpl-724",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245120,
+ "created": 1759437797,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/92a9a916ef02.json b/tests/integration/recordings/responses/92a9a916ef02.json
index 5fe294826..5f2dfd618 100644
--- a/tests/integration/recordings/responses/92a9a916ef02.json
+++ b/tests/integration/recordings/responses/92a9a916ef02.json
@@ -20,14 +20,14 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-343",
+ "id": "chatcmpl-923",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "The currency of Japan is the Japanese yen (, ry\u014d) and its symbol, \u00a5.",
+ "content": "The currency of Japan is the Japanese yen (\u00a5). It is represented by the symbol \u00a5. In some contexts, it's also abbreviated as \"JPY\" or written as \"yen\". The Bank of Japan is responsible for managing the country's monetary policy and issuing new yen banknotes and coins.",
"refusal": null,
"role": "assistant",
"annotations": null,
@@ -37,15 +37,15 @@
}
}
],
- "created": 1759012146,
+ "created": 1759437863,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 20,
+ "completion_tokens": 61,
"prompt_tokens": 32,
- "total_tokens": 52,
+ "total_tokens": 93,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/930cf0cec376.json b/tests/integration/recordings/responses/930cf0cec376.json
new file mode 100644
index 000000000..53b8d5f71
--- /dev/null
+++ b/tests/integration/recordings/responses/930cf0cec376.json
@@ -0,0 +1,1584 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_jlswgy4x",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_jlswgy4x",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " was",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " unable",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " find",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " get",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437841,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "_bo",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "iling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "_point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " tool",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " does",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " not",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " have",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " information",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " on",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " its",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " database",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " If",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " you",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "'re",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " looking",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " for",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437842,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " a",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " different",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " substance",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": ",",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " please",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " let",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " me",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " know",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " and",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "'ll",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " be",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " happy",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " try",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": " again",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-188",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437843,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/931ac7158789.json b/tests/integration/recordings/responses/931ac7158789.json
new file mode 100644
index 000000000..44aa46105
--- /dev/null
+++ b/tests/integration/recordings/responses/931ac7158789.json
@@ -0,0 +1,86 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "What's the weather in San Francisco?"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_weather",
+ "description": "Get weather for a location",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "type": "string",
+ "description": "City name"
+ }
+ },
+ "required": [
+ "location"
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-505",
+ "choices": [
+ {
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": [
+ {
+ "id": "call_t7y6oe6q",
+ "function": {
+ "arguments": "{\"location\":\"San Francisco\"}",
+ "name": "get_weather"
+ },
+ "type": "function",
+ "index": 0
+ }
+ ]
+ }
+ }
+ ],
+ "created": 1759437802,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 18,
+ "prompt_tokens": 161,
+ "total_tokens": 179,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/9db34836a1a7.json b/tests/integration/recordings/responses/9db34836a1a7.json
new file mode 100644
index 000000000..b98ea52df
--- /dev/null
+++ b/tests/integration/recordings/responses/9db34836a1a7.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "required",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-624",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_j2jdmkk1",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441665,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-624",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441665,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/9e0b1ac678f6.json b/tests/integration/recordings/responses/9e0b1ac678f6.json
index 8aa06d495..02491daed 100644
--- a/tests/integration/recordings/responses/9e0b1ac678f6.json
+++ b/tests/integration/recordings/responses/9e0b1ac678f6.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-122",
+ "id": "chatcmpl-141",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245126,
+ "created": 1759437800,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/9ffc75524647.json b/tests/integration/recordings/responses/9ffc75524647.json
new file mode 100644
index 000000000..8f7e2480b
--- /dev/null
+++ b/tests/integration/recordings/responses/9ffc75524647.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "required",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_ew600lfr",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429347,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-704",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429347,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/a0c4df33879f.json b/tests/integration/recordings/responses/a0c4df33879f.json
index 7898e5b02..e2bc1da33 100644
--- a/tests/integration/recordings/responses/a0c4df33879f.json
+++ b/tests/integration/recordings/responses/a0c4df33879f.json
@@ -21,7 +21,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -36,7 +36,7 @@
"logprobs": null
}
],
- "created": 1756921356,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -47,7 +47,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -62,7 +62,7 @@
"logprobs": null
}
],
- "created": 1756921356,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -73,11 +73,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
- "content": " name",
+ "content": " word",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -88,7 +88,7 @@
"logprobs": null
}
],
- "created": 1756921356,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -99,7 +99,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -114,7 +114,7 @@
"logprobs": null
}
],
- "created": 1756921356,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -125,1099 +125,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " the",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " Sun",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " is",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " Sol",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": ".",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " In",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " ancient",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " Roman",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " mythology",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": ",",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " Sol",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " was",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " a",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " god",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " equivalent",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " to",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " the",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " Greek",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " god",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " Hel",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921356,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": "ios",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": ",",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " and",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " he",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " was",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " often",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " depicted",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " as",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " a",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " radi",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": "ating",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " sun",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " with",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " rays",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " eman",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": "ating",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " from",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " his",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " body",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": ".",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " The",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " term",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -1232,7 +140,7 @@
"logprobs": null
}
],
- "created": 1756921357,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1243,11 +151,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
- "content": "s",
+ "content": "sun",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -1258,7 +166,7 @@
"logprobs": null
}
],
- "created": 1756921357,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1269,33 +177,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": "olar",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921357,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -1310,7 +192,7 @@
"logprobs": null
}
],
- "created": 1756921358,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1321,7 +203,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -1336,7 +218,7 @@
"logprobs": null
}
],
- "created": 1756921358,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1347,11 +229,11 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
- "content": " still",
+ "content": " Sol",
"function_call": null,
"refusal": null,
"role": "assistant",
@@ -1362,7 +244,7 @@
"logprobs": null
}
],
- "created": 1756921358,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1373,475 +255,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " used",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " in",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " scientific",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " and",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " astronomical",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " contexts",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " to",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " refer",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " to",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " phenomena",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " related",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " to",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " the",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " Sun",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " or",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " the",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " solar",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
- "choices": [
- {
- "delta": {
- "content": " system",
- "function_call": null,
- "refusal": null,
- "role": "assistant",
- "tool_calls": null
- },
- "finish_reason": null,
- "index": 0,
- "logprobs": null
- }
- ],
- "created": 1756921358,
- "model": "llama3.2:3b-instruct-fp16",
- "object": "chat.completion.chunk",
- "service_tier": null,
- "system_fingerprint": "fp_ollama",
- "usage": null
- }
- },
- {
- "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
- "__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -1856,7 +270,7 @@
"logprobs": null
}
],
- "created": 1756921358,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -1867,7 +281,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-792",
+ "id": "chatcmpl-957",
"choices": [
{
"delta": {
@@ -1882,7 +296,7 @@
"logprobs": null
}
],
- "created": 1756921358,
+ "created": 1759437880,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/a11b11923cc8.json b/tests/integration/recordings/responses/a11b11923cc8.json
new file mode 100644
index 000000000..f3031b8fd
--- /dev/null
+++ b/tests/integration/recordings/responses/a11b11923cc8.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "str",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "bool",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-410",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_4476969q",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425215,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-410",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425215,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/a46b77ffd494.json b/tests/integration/recordings/responses/a46b77ffd494.json
index dff3d3fd7..469fe098d 100644
--- a/tests/integration/recordings/responses/a46b77ffd494.json
+++ b/tests/integration/recordings/responses/a46b77ffd494.json
@@ -17,7 +17,7 @@
"body": {
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-183",
+ "id": "cmpl-253",
"choices": [
{
"finish_reason": "stop",
@@ -26,7 +26,7 @@
"text": "Michael Jordan was born in the year of "
}
],
- "created": 1758978053,
+ "created": 1759376606,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
diff --git a/tests/integration/recordings/responses/a4c8d19bb1eb.json b/tests/integration/recordings/responses/a4c8d19bb1eb.json
index 89f52f82e..e71bd9b89 100644
--- a/tests/integration/recordings/responses/a4c8d19bb1eb.json
+++ b/tests/integration/recordings/responses/a4c8d19bb1eb.json
@@ -20,7 +20,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-715",
+ "id": "chatcmpl-415",
"choices": [
{
"finish_reason": "stop",
@@ -37,7 +37,7 @@
}
}
],
- "created": 1756921367,
+ "created": 1759437885,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/a689181d64d3.json b/tests/integration/recordings/responses/a689181d64d3.json
new file mode 100644
index 000000000..61c34a3e4
--- /dev/null
+++ b/tests/integration/recordings/responses/a689181d64d3.json
@@ -0,0 +1,86 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "What's the weather in Tokyo?"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_weather",
+ "description": "Get weather information",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "location": {
+ "type": "string",
+ "description": "City name"
+ }
+ },
+ "required": [
+ "location"
+ ]
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-54",
+ "choices": [
+ {
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": [
+ {
+ "id": "call_v05v3tmn",
+ "function": {
+ "arguments": "{\"location\":\"Tokyo\"}",
+ "name": "get_weather"
+ },
+ "type": "function",
+ "index": 0
+ }
+ ]
+ }
+ }
+ ],
+ "created": 1759376607,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 18,
+ "prompt_tokens": 158,
+ "total_tokens": 176,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/a92b8fc775d5.json b/tests/integration/recordings/responses/a92b8fc775d5.json
index b7fa9fc1d..2bf18d6cc 100644
--- a/tests/integration/recordings/responses/a92b8fc775d5.json
+++ b/tests/integration/recordings/responses/a92b8fc775d5.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-952",
+ "id": "chatcmpl-973",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245123,
+ "created": 1759437798,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/adf150be9638.json b/tests/integration/recordings/responses/adf150be9638.json
new file mode 100644
index 000000000..a4b636cea
--- /dev/null
+++ b/tests/integration/recordings/responses/adf150be9638.json
@@ -0,0 +1,419 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_k3oc5cxw",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_k3oc5cxw",
+ "content": "-100"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": " Poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441673,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": "100",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441674,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": "\u00b0C",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441674,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441674,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-378",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441674,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/b050e5a7e4a3.json b/tests/integration/recordings/responses/b050e5a7e4a3.json
index 5cefe7190..b3d55a211 100644
--- a/tests/integration/recordings/responses/b050e5a7e4a3.json
+++ b/tests/integration/recordings/responses/b050e5a7e4a3.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-207",
+ "id": "chatcmpl-112",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245127,
+ "created": 1759437800,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/b178d000a14a.json b/tests/integration/recordings/responses/b178d000a14a.json
new file mode 100644
index 000000000..715bfe484
--- /dev/null
+++ b/tests/integration/recordings/responses/b178d000a14a.json
@@ -0,0 +1,57 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama-guard3:1b",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Task: Check if there is unsafe content in 'Assistant' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: What is the boiling point of the liquid polyjuice in celsius?\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\nAssistant: I was unable to find the boiling point of liquid polyjuice in Celsius. The boiling point could not be located in my database.\n\n\n\nProvide your safety assessment for ONLY THE LAST Assistant message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories."
+ }
+ ],
+ "stream": false,
+ "temperature": 0.0
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama-guard3:1b"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-9",
+ "choices": [
+ {
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "safe",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": null
+ }
+ }
+ ],
+ "created": 1759437833,
+ "model": "llama-guard3:1b",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 2,
+ "prompt_tokens": 449,
+ "total_tokens": 451,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/b28f75bd87dc.json b/tests/integration/recordings/responses/b28f75bd87dc.json
index d37fbede8..f01da4be5 100644
--- a/tests/integration/recordings/responses/b28f75bd87dc.json
+++ b/tests/integration/recordings/responses/b28f75bd87dc.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-489",
+ "id": "chatcmpl-36",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282539,
+ "created": 1759441671,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/b374fc18c641.json b/tests/integration/recordings/responses/b374fc18c641.json
new file mode 100644
index 000000000..55cf0d7f3
--- /dev/null
+++ b/tests/integration/recordings/responses/b374fc18c641.json
@@ -0,0 +1,258 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use the tool `get_boiling_point` to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.268889Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "The",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.310661Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " boiling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.35195Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.393537Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " of",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.435595Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.481337Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.526974Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.569942Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " is",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.612747Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " -",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.656585Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "100",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.697454Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\u00b0C",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.738529Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ".",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:10.781405Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 663905208,
+ "load_duration": 85733250,
+ "prompt_eval_count": 410,
+ "prompt_eval_duration": 64272708,
+ "eval_count": 13,
+ "eval_duration": 513001750,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/b57525af4982.json b/tests/integration/recordings/responses/b57525af4982.json
new file mode 100644
index 000000000..651478385
--- /dev/null
+++ b/tests/integration/recordings/responses/b57525af4982.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point_with_metadata",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-613",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_gefseirj",
+ "function": {
+ "arguments": "{\"celcius\":false,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point_with_metadata"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-613",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/b58e35a624b0.json b/tests/integration/recordings/responses/b58e35a624b0.json
index f3eb65091..4f93947bc 100644
--- a/tests/integration/recordings/responses/b58e35a624b0.json
+++ b/tests/integration/recordings/responses/b58e35a624b0.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-944",
+ "id": "chatcmpl-912",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759368373,
+ "created": 1759437811,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/c13d7510774c.json b/tests/integration/recordings/responses/c13d7510774c.json
index 00e9659e9..b51ac089e 100644
--- a/tests/integration/recordings/responses/c13d7510774c.json
+++ b/tests/integration/recordings/responses/c13d7510774c.json
@@ -18,390 +18,390 @@
"data": [
{
"embedding": [
- -0.0011296043,
- 0.06740522,
- 0.015186453,
- 0.037259158,
- 0.02935556,
- 0.015181291,
- 0.07432997,
- -0.0033194474,
- 0.0658106,
- -0.021833794,
- 0.034404922,
- 0.05099269,
- -0.011411872,
- -0.025082853,
- -0.051754408,
- 0.027195254,
- 0.07849019,
- -0.06000248,
- 0.010478361,
- -0.003392346,
- 0.043441977,
- 0.12292443,
- 9.388175e-05,
- 0.0021187037,
- 0.018079525,
- 0.045084555,
- -0.097606525,
- 0.11185215,
- 0.049650617,
- -0.0348426,
- -0.039580915,
- 0.0035499185,
- 0.15893514,
- 0.063421525,
- 0.047970187,
- 0.011613767,
- 0.009793674,
- 0.01536712,
- 0.009413064,
- 0.07999014,
- 0.01915802,
- -0.13722447,
- 0.017290922,
- 0.013689777,
- 0.014259784,
- -0.00021621982,
- -0.017730612,
- 0.022902183,
- 0.035927463,
- -0.015361024,
- -0.00975885,
- -0.040180918,
- -0.011500755,
- 0.00012558368,
- 0.08540788,
- 0.08731169,
- 0.004690206,
- 0.006160604,
- 0.003023499,
- 0.008887178,
- -0.006278653,
- 0.050593477,
- 0.00053471717,
- 0.04677382,
- 0.09365536,
- -0.012813678,
- 0.0177166,
- -0.06271032,
- -0.11535796,
- 0.04110661,
- -0.014942371,
- 0.044813167,
- -0.020877626,
- 0.04299617,
- -0.06107898,
- 0.01997848,
- -0.0687263,
- -0.035494387,
- 0.04186985,
- 0.012177578,
- -0.029081868,
- -0.066437304,
- 0.030620316,
- 0.05150629,
- -0.12813967,
- 0.06819209,
- -0.047090717,
- -0.032926783,
- 0.007485966,
- -0.017814271,
- 0.038294822,
- -0.015788501,
- 0.07054281,
- 0.03807343,
- -0.114283286,
- 0.042118594,
- -0.111601785,
- -0.04573834,
- -0.02895515,
- 0.12735783,
- -0.013941619,
- -0.027150463,
- 0.072897464,
- 0.024098374,
- -0.054044593,
- -0.13128933,
- 0.030136578,
- -0.023237763,
- -0.019079136,
- -0.0078745885,
- -0.021944366,
- -0.053324133,
- -0.070892006,
- -0.011552823,
- -0.023377078,
- -0.01562657,
- 0.051452935,
- 0.029251281,
- 0.06480842,
- 0.06403676,
- 0.014424153,
- -0.057994097,
- -0.06993807,
- -0.023921017,
- -0.08493092,
- -0.087801315,
- 0.048142783,
- -6.124397e-33,
- 0.0103092175,
- 0.038688924,
- 0.003180582,
- 0.03575604,
- 0.005059993,
- -0.0041896994,
- -0.05389261,
- -0.029881287,
- -0.075520456,
- -0.07879111,
- -0.012291425,
- -0.05053033,
- 0.020719253,
- -0.05190443,
- -0.05927485,
- -0.05987536,
- -0.05572788,
- 0.03220933,
- -0.006331632,
- -0.021651596,
- -0.059913907,
- 0.051977657,
- 0.05122985,
- -0.06350782,
- -0.04872765,
- -0.014282773,
- 0.0025304393,
- -0.024342295,
- -0.0055265254,
- 0.020074077,
- -0.10194665,
- 0.010741537,
- -0.02318619,
- -0.08105595,
- -0.014973416,
- 0.0017918752,
- 0.045083463,
- -0.05282281,
- -0.053680934,
- -0.013229242,
- -0.019794637,
- 0.020036008,
- -0.00081875344,
- -0.10115686,
- -0.0006884125,
- 0.09664284,
- -0.03943104,
- 0.04955554,
- 0.042241447,
- 0.007962193,
- -0.052323878,
- 0.05189162,
- 0.037112337,
- 0.034818016,
- 0.063431285,
- -0.02657652,
- -0.009212341,
- -0.0025556423,
- -0.05609933,
- 0.0020433308,
- -0.020113751,
- 0.0012227942,
- -0.0017669081,
- 0.019119242,
- 0.016553605,
- -0.011386767,
- 0.010368127,
- -0.00788346,
- 0.046651863,
- -0.046871297,
- -0.085224025,
- -0.008958986,
- 0.012052177,
- 0.013311017,
- 0.015157192,
- 0.03708167,
- 0.026588887,
- 0.014486772,
- -0.013955214,
- 0.019986698,
- -0.06885552,
- -0.07106239,
- 0.012334861,
- 0.03284816,
- -0.03151976,
- 0.045773514,
- 0.067994975,
- -0.077492714,
- 0.018440822,
- 0.06622958,
- -0.08641996,
- 0.008967366,
- 0.04134085,
- 0.009518882,
- 0.006565088,
- 4.711897e-33,
- -0.02617601,
- 0.0013207985,
- -0.014141556,
- -0.024331013,
- 0.06929469,
- 0.03143924,
- 0.03726272,
- 0.064707026,
- 0.049426436,
- 0.11073603,
- 0.0498569,
- 0.066796474,
- 0.04154851,
- -0.034098588,
- 0.07028382,
- 0.034863915,
- 0.12904617,
- -0.021078404,
- 0.008925486,
- 0.03016334,
- -0.02286831,
- 0.03649071,
- -0.13193603,
- 0.045608096,
- -0.012805477,
- 0.041747537,
- 0.12321406,
- -0.013507891,
- -0.007307474,
- -0.02975696,
- 0.025006123,
- -0.009506256,
- 0.024761083,
- 0.023204166,
- -0.019123148,
- 0.02259915,
- 0.013744109,
- -0.03847919,
- -0.014476444,
- 0.07522499,
- 0.13586833,
- 0.009872778,
- -0.03752485,
- -0.0273059,
- -0.016470777,
- -0.048831154,
- -0.03521732,
- -0.054363117,
- -0.0017890002,
- 0.035665076,
- -0.010268516,
- -0.018602924,
- -0.036469962,
- -0.055976517,
- -0.007821111,
- 0.00907826,
- -0.0073335953,
- 0.050373644,
- -0.00025981313,
- -0.036349144,
- -0.024950698,
- 0.058883175,
- -0.07245624,
- 0.07399545,
- 0.053919416,
- -0.051881794,
- -0.0063462397,
- 0.07852022,
- -0.016959544,
- -0.0066832895,
- 0.01265072,
- -0.014152041,
- -0.13643119,
- -0.085250236,
- -0.017519519,
- -0.00466121,
- 0.0136799645,
- 0.0009118405,
- -0.071966685,
- -0.06886893,
- 0.14207116,
- 0.03186518,
- -0.05592076,
- 0.030404905,
- 0.061872244,
- 0.029894035,
- -0.00096155383,
- -0.06500391,
- -0.020616096,
- 0.039591115,
- -0.12383165,
- 0.0028830946,
- 0.051231142,
- 0.13391772,
- -0.08845233,
- -1.7589368e-08,
- -0.025769057,
- -0.080324695,
- -0.09164953,
- 0.032005485,
- 0.005889216,
- 0.114638664,
- 0.0233727,
- -0.069048144,
- -0.05594302,
- -0.05788277,
- 0.014665582,
- 0.080326974,
- 0.0036707798,
- -0.030798541,
- 0.024442635,
- 0.008542568,
- -0.05288123,
- -0.06640491,
- 0.00074039627,
- -0.023801958,
- 0.030778948,
- 0.054075025,
- -0.0027453878,
- -0.09929041,
- -0.0150463935,
- 0.01624328,
- -0.0015419688,
- 0.011909824,
- 0.007890519,
- 0.0489657,
- 0.004866092,
- 0.08265809,
- -0.0145542445,
- -0.04386104,
- 0.004611713,
- 0.024626419,
- 0.023854014,
- 0.0236921,
- 0.05076065,
- -0.051832993,
- 0.021252805,
- -0.0033932943,
- -0.021158189,
- 0.020595197,
- -0.06475187,
- 0.054174356,
- 0.027812954,
- -0.05294382,
- 0.015094968,
- -0.119794324,
- -0.034157146,
- -0.012219483,
- 0.047453884,
- 0.020896995,
- -0.026357891,
- 0.015037571,
- 0.033969007,
- 0.05981613,
- -0.052542053,
- 0.033553857,
- 0.06119396,
- 0.09635468,
- 0.11632743,
- -0.016134953
+ -0.0010839553,
+ 0.067364,
+ 0.015185306,
+ 0.037240896,
+ 0.029337138,
+ 0.015160007,
+ 0.0743005,
+ -0.0032980628,
+ 0.06581814,
+ -0.021851996,
+ 0.034412965,
+ 0.051005766,
+ -0.011422501,
+ -0.025062356,
+ -0.051756065,
+ 0.027193472,
+ 0.07849549,
+ -0.05999108,
+ 0.010471458,
+ -0.003400683,
+ 0.043449093,
+ 0.122919865,
+ 9.668583e-05,
+ 0.002153268,
+ 0.018064681,
+ 0.045069378,
+ -0.09762388,
+ 0.11186886,
+ 0.049657565,
+ -0.03485217,
+ -0.039568134,
+ 0.003532146,
+ 0.15894793,
+ 0.06341193,
+ 0.047953114,
+ 0.011617699,
+ 0.009799243,
+ 0.015377702,
+ 0.009379663,
+ 0.079989135,
+ 0.019207356,
+ -0.13718612,
+ 0.01730099,
+ 0.013687199,
+ 0.014266827,
+ -0.00022628276,
+ -0.017710257,
+ 0.02291068,
+ 0.03590651,
+ -0.015361055,
+ -0.00978436,
+ -0.0401825,
+ -0.011481894,
+ 0.00014050963,
+ 0.08540761,
+ 0.08730027,
+ 0.0046967245,
+ 0.006164595,
+ 0.003031956,
+ 0.008891807,
+ -0.006260525,
+ 0.05061661,
+ 0.0005252785,
+ 0.0467754,
+ 0.09363822,
+ -0.012814104,
+ 0.017708639,
+ -0.062698044,
+ -0.11535818,
+ 0.041123625,
+ -0.014939021,
+ 0.044815876,
+ -0.020868087,
+ 0.042999975,
+ -0.061038766,
+ 0.019998673,
+ -0.068740115,
+ -0.035516046,
+ 0.041884515,
+ 0.012185281,
+ -0.029084096,
+ -0.06643917,
+ 0.030638866,
+ 0.05149607,
+ -0.12815061,
+ 0.06821646,
+ -0.047070153,
+ -0.032925386,
+ 0.007499353,
+ -0.017841771,
+ 0.038296465,
+ -0.015792726,
+ 0.07054022,
+ 0.038072467,
+ -0.11428876,
+ 0.04210153,
+ -0.11162366,
+ -0.045723915,
+ -0.028951947,
+ 0.12735675,
+ -0.013946637,
+ -0.027157523,
+ 0.07295939,
+ 0.024098422,
+ -0.054050542,
+ -0.13125896,
+ 0.03013205,
+ -0.023223283,
+ -0.019072957,
+ -0.007864101,
+ -0.021954412,
+ -0.05329901,
+ -0.07088355,
+ -0.0115214065,
+ -0.023399564,
+ -0.015638318,
+ 0.05148062,
+ 0.029261008,
+ 0.06481798,
+ 0.064031154,
+ 0.014445124,
+ -0.058017716,
+ -0.069921836,
+ -0.023950975,
+ -0.08490842,
+ -0.08779567,
+ 0.048162255,
+ -6.1240354e-33,
+ 0.010315817,
+ 0.038685724,
+ 0.0031864564,
+ 0.0357421,
+ 0.0050265454,
+ -0.004210234,
+ -0.053900674,
+ -0.02988569,
+ -0.07548199,
+ -0.078777455,
+ -0.012271205,
+ -0.05056629,
+ 0.020729113,
+ -0.051866043,
+ -0.059254467,
+ -0.059903424,
+ -0.055699438,
+ 0.032196835,
+ -0.006328442,
+ -0.021668624,
+ -0.059921067,
+ 0.0519611,
+ 0.051227964,
+ -0.063502096,
+ -0.04873505,
+ -0.014265467,
+ 0.0025537873,
+ -0.024346355,
+ -0.0055181426,
+ 0.02007461,
+ -0.10196586,
+ 0.010727814,
+ -0.023194604,
+ -0.081025146,
+ -0.014997581,
+ 0.0017926424,
+ 0.045078833,
+ -0.052792255,
+ -0.05368693,
+ -0.013245513,
+ -0.019808132,
+ 0.020031843,
+ -0.00081401254,
+ -0.10117647,
+ -0.0007066768,
+ 0.09663035,
+ -0.03946875,
+ 0.04954661,
+ 0.042237334,
+ 0.007943922,
+ -0.05234212,
+ 0.051887065,
+ 0.03711589,
+ 0.034850314,
+ 0.063441575,
+ -0.026583876,
+ -0.009227281,
+ -0.0025737104,
+ -0.056082893,
+ 0.0020716325,
+ -0.020129146,
+ 0.0012315192,
+ -0.0017609745,
+ 0.019111704,
+ 0.016572498,
+ -0.011374,
+ 0.010381644,
+ -0.007864189,
+ 0.04664868,
+ -0.046856377,
+ -0.08523834,
+ -0.008974813,
+ 0.012022968,
+ 0.013285977,
+ 0.015182303,
+ 0.03708482,
+ 0.026587088,
+ 0.014473839,
+ -0.013946565,
+ 0.01999883,
+ -0.06888259,
+ -0.07111367,
+ 0.012369427,
+ 0.032828625,
+ -0.03152666,
+ 0.045777358,
+ 0.06801705,
+ -0.07747748,
+ 0.018461134,
+ 0.06620267,
+ -0.086365156,
+ 0.008950603,
+ 0.041320425,
+ 0.009541193,
+ 0.0066037327,
+ 4.71081e-33,
+ -0.026172558,
+ 0.0013145636,
+ -0.014140948,
+ -0.024360213,
+ 0.06931815,
+ 0.031448748,
+ 0.037257418,
+ 0.06468137,
+ 0.049403396,
+ 0.11072201,
+ 0.04985356,
+ 0.06679111,
+ 0.04153249,
+ -0.034106053,
+ 0.070283465,
+ 0.034855895,
+ 0.12902643,
+ -0.021033453,
+ 0.008940618,
+ 0.030177405,
+ -0.022881329,
+ 0.036504544,
+ -0.13194299,
+ 0.045612644,
+ -0.0127895875,
+ 0.04174139,
+ 0.1232064,
+ -0.013484046,
+ -0.007285246,
+ -0.029776007,
+ 0.025007037,
+ -0.009516822,
+ 0.02475585,
+ 0.023208592,
+ -0.019141924,
+ 0.02259424,
+ 0.013740329,
+ -0.038490705,
+ -0.014461541,
+ 0.075218394,
+ 0.13589163,
+ 0.009839605,
+ -0.037563317,
+ -0.02737327,
+ -0.016485116,
+ -0.048845276,
+ -0.03523722,
+ -0.05439929,
+ -0.0017957076,
+ 0.03563579,
+ -0.010255764,
+ -0.01859244,
+ -0.03647324,
+ -0.055985246,
+ -0.007833892,
+ 0.009086756,
+ -0.007333394,
+ 0.050386623,
+ -0.0002305643,
+ -0.03637248,
+ -0.024937423,
+ 0.058877032,
+ -0.07250415,
+ 0.07401245,
+ 0.053917013,
+ -0.051895224,
+ -0.006332244,
+ 0.07850189,
+ -0.01695057,
+ -0.006673017,
+ 0.012659739,
+ -0.014127065,
+ -0.13639799,
+ -0.08524976,
+ -0.017533274,
+ -0.0046930755,
+ 0.013687301,
+ 0.0009185522,
+ -0.0719948,
+ -0.06887779,
+ 0.14208324,
+ 0.03187123,
+ -0.055919908,
+ 0.030401653,
+ 0.061900012,
+ 0.029921472,
+ -0.00096237566,
+ -0.065010294,
+ -0.020657646,
+ 0.039562404,
+ -0.123846576,
+ 0.0028867351,
+ 0.051196404,
+ 0.13397509,
+ -0.088453874,
+ -1.7590333e-08,
+ -0.025786474,
+ -0.080303885,
+ -0.09164947,
+ 0.031999,
+ 0.00584884,
+ 0.11464121,
+ 0.023377793,
+ -0.06902527,
+ -0.055941124,
+ -0.05787791,
+ 0.014640494,
+ 0.080320895,
+ 0.0037027278,
+ -0.030824674,
+ 0.024432683,
+ 0.008549355,
+ -0.05291309,
+ -0.06636625,
+ 0.0007468212,
+ -0.02379191,
+ 0.030766092,
+ 0.054053318,
+ -0.0027251292,
+ -0.09928475,
+ -0.0150488615,
+ 0.016240431,
+ -0.0015727071,
+ 0.01190173,
+ 0.007895162,
+ 0.04894733,
+ 0.00487708,
+ 0.08263861,
+ -0.014527478,
+ -0.043879665,
+ 0.004633697,
+ 0.024611989,
+ 0.023827499,
+ 0.02366802,
+ 0.050754935,
+ -0.051841788,
+ 0.0212632,
+ -0.0034418616,
+ -0.021175656,
+ 0.020591663,
+ -0.06475325,
+ 0.0542002,
+ 0.027792262,
+ -0.05295982,
+ 0.01509645,
+ -0.11977527,
+ -0.03416359,
+ -0.012206606,
+ 0.047451705,
+ 0.020876253,
+ -0.026368074,
+ 0.01502373,
+ 0.033982284,
+ 0.059788153,
+ -0.052526973,
+ 0.03356499,
+ 0.061180886,
+ 0.096336305,
+ 0.116353564,
+ -0.016122948
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/c1f63bb6469c.json b/tests/integration/recordings/responses/c1f63bb6469c.json
new file mode 100644
index 000000000..0f25e35da
--- /dev/null
+++ b/tests/integration/recordings/responses/c1f63bb6469c.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point_with_metadata",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "str",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "bool",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-14",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_1fnozor9",
+ "function": {
+ "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point_with_metadata"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425243,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-14",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425243,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/c2ac76cbf66d.json b/tests/integration/recordings/responses/c2ac76cbf66d.json
index 496f41815..d9b0d7f1d 100644
--- a/tests/integration/recordings/responses/c2ac76cbf66d.json
+++ b/tests/integration/recordings/responses/c2ac76cbf66d.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-876",
+ "id": "chatcmpl-368",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282400,
+ "created": 1759373692,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/c3dbccc5de74.json b/tests/integration/recordings/responses/c3dbccc5de74.json
index a2043db9a..699297a59 100644
--- a/tests/integration/recordings/responses/c3dbccc5de74.json
+++ b/tests/integration/recordings/responses/c3dbccc5de74.json
@@ -47,7 +47,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-634",
+ "id": "chatcmpl-688",
"choices": [
{
"delta": {
@@ -58,7 +58,7 @@
"tool_calls": [
{
"index": 0,
- "id": "call_wubm4yax",
+ "id": "call_bnha2w8y",
"function": {
"arguments": "{\"location\":\"San Francisco, CA\"}",
"name": "get_weather"
@@ -72,7 +72,7 @@
"logprobs": null
}
],
- "created": 1758975115,
+ "created": 1759376611,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -83,7 +83,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-634",
+ "id": "chatcmpl-688",
"choices": [
{
"delta": {
@@ -98,7 +98,7 @@
"logprobs": null
}
],
- "created": 1758975115,
+ "created": 1759376611,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/c4991de37dfb.json b/tests/integration/recordings/responses/c4991de37dfb.json
new file mode 100644
index 000000000..e7feca5ca
--- /dev/null
+++ b/tests/integration/recordings/responses/c4991de37dfb.json
@@ -0,0 +1,78 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Call the no args tool"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "no_args_tool",
+ "description": "Tool with no arguments",
+ "parameters": {
+ "type": "object",
+ "properties": {}
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-978",
+ "choices": [
+ {
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": [
+ {
+ "id": "call_wbx3rwxz",
+ "function": {
+ "arguments": "{}",
+ "name": "no_args_tool"
+ },
+ "type": "function",
+ "index": 0
+ }
+ ]
+ }
+ }
+ ],
+ "created": 1759437808,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 14,
+ "prompt_tokens": 148,
+ "total_tokens": 162,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/c62eb5d7115e.json b/tests/integration/recordings/responses/c62eb5d7115e.json
index fa872ac44..9dcd317f7 100644
--- a/tests/integration/recordings/responses/c62eb5d7115e.json
+++ b/tests/integration/recordings/responses/c62eb5d7115e.json
@@ -20,14 +20,14 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-842",
+ "id": "chatcmpl-422",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "The smallest country in the world is the Vatican City, an independent city-state located within Rome, Italy. It has a total area of approximately 0.44 km\u00b2 (0.17 sq mi) and a population of around 800 people.\n\nDespite its tiny size, the Vatican City is a sovereign state with its own government, currency, postal system, and even a small army (the Gendarmeria Romana). It's also home to numerous iconic landmarks, including St. Peter's Basilica, the Sistine Chapel, and the Vatican Museums.\n\nThe Vatican City is so small that it can fit entirely within an average American city park!",
+ "content": "The smallest country in the world is the Vatican City, with an area of approximately 0.44 km\u00b2 (0.17 sq mi). It is an independent city-state located within Rome, Italy, and is the headquarters of the Catholic Church. Despite its small size, the Vatican City has a population of around 800 people, including the Pope and other high-ranking officials.",
"refusal": null,
"role": "assistant",
"annotations": null,
@@ -37,15 +37,15 @@
}
}
],
- "created": 1759012145,
+ "created": 1759437861,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 133,
+ "completion_tokens": 77,
"prompt_tokens": 34,
- "total_tokens": 167,
+ "total_tokens": 111,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/c6fc83f0a1d5.json b/tests/integration/recordings/responses/c6fc83f0a1d5.json
new file mode 100644
index 000000000..f13430cc4
--- /dev/null
+++ b/tests/integration/recordings/responses/c6fc83f0a1d5.json
@@ -0,0 +1,1922 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_bhmzk2sp",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point_with_metadata",
+ "arguments": "{\"celcius\":false,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_bhmzk2sp",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point_with_metadata",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437867,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " apologize",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437867,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " for",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437867,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " error",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " It",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " seems",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " that",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " `",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "get",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "_bo",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "iling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "_point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "_with",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "_metadata",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "`",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " tool",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " requires",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " a",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " different",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " format",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": ".\n\n",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "Unfortunately",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": ",",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437868,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " don",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "'t",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " have",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " enough",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " information",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " provide",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " Can",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " you",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " please",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " provide",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " more",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " context",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " or",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " clarify",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " what",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " you",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437869,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " are",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " looking",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " for",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "?",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " Is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " it",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " a",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " specific",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " type",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " or",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " a",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " general",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": " answer",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "?",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-723",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437870,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/c7fc52830c4c.json b/tests/integration/recordings/responses/c7fc52830c4c.json
new file mode 100644
index 000000000..a6315dc50
--- /dev/null
+++ b/tests/integration/recordings/responses/c7fc52830c4c.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-52",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_s1g1se8b",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441155,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-52",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441155,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/c8234a1171f3.json b/tests/integration/recordings/responses/c8234a1171f3.json
index 241e998e1..10318c9eb 100644
--- a/tests/integration/recordings/responses/c8234a1171f3.json
+++ b/tests/integration/recordings/responses/c8234a1171f3.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-306",
+ "id": "chatcmpl-753",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282478,
+ "created": 1759373699,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/c8e196049fe4.json b/tests/integration/recordings/responses/c8e196049fe4.json
index 3a1495f07..62d6674e6 100644
--- a/tests/integration/recordings/responses/c8e196049fe4.json
+++ b/tests/integration/recordings/responses/c8e196049fe4.json
@@ -20,7 +20,7 @@
"body": {
"__type__": "openai.types.completion.Completion",
"__data__": {
- "id": "cmpl-381",
+ "id": "cmpl-130",
"choices": [
{
"finish_reason": "stop",
@@ -29,7 +29,7 @@
"text": "Michael Jordan was born in the year of "
}
],
- "created": 1758978056,
+ "created": 1759376606,
"model": "llama3.2:3b-instruct-fp16",
"object": "text_completion",
"system_fingerprint": "fp_ollama",
diff --git a/tests/integration/recordings/responses/ca5e40a262f5.json b/tests/integration/recordings/responses/ca5e40a262f5.json
index d0a48b37d..5584cdbec 100644
--- a/tests/integration/recordings/responses/ca5e40a262f5.json
+++ b/tests/integration/recordings/responses/ca5e40a262f5.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-116",
+ "id": "chatcmpl-582",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759368377,
+ "created": 1759441161,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/ca92e698d8cd.json b/tests/integration/recordings/responses/ca92e698d8cd.json
new file mode 100644
index 000000000..d6a488ffb
--- /dev/null
+++ b/tests/integration/recordings/responses/ca92e698d8cd.json
@@ -0,0 +1,119 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant Always respond with tool calls no matter what. "
+ },
+ {
+ "role": "user",
+ "content": "Get the boiling point of polyjuice with a tool call."
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-803",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_l2ovyvtm",
+ "function": {
+ "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429341,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-803",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429342,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/cb0e0321c53c.json b/tests/integration/recordings/responses/cb0e0321c53c.json
new file mode 100644
index 000000000..0e46fc195
--- /dev/null
+++ b/tests/integration/recordings/responses/cb0e0321c53c.json
@@ -0,0 +1,414 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_j2jdmkk1",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_j2jdmkk1",
+ "content": "-100"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "required",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441666,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441666,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441666,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441666,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": " Poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441666,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": "100",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": "\u00b0C",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-214",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441667,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/cca0267555a6.json b/tests/integration/recordings/responses/cca0267555a6.json
new file mode 100644
index 000000000..7468ecf0a
--- /dev/null
+++ b/tests/integration/recordings/responses/cca0267555a6.json
@@ -0,0 +1,97 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Calculate 5 + 3"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "calculate",
+ "description": "",
+ "parameters": {
+ "properties": {
+ "x": {
+ "title": "X",
+ "type": "number"
+ },
+ "y": {
+ "title": "Y",
+ "type": "number"
+ },
+ "operation": {
+ "title": "Operation",
+ "type": "string"
+ }
+ },
+ "required": [
+ "x",
+ "y",
+ "operation"
+ ],
+ "title": "calculateArguments",
+ "type": "object"
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-376",
+ "choices": [
+ {
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": [
+ {
+ "id": "call_px63ad04",
+ "function": {
+ "arguments": "{\"operation\":\"+\",\"x\":\"5\",\"y\":\"3\"}",
+ "name": "calculate"
+ },
+ "type": "function",
+ "index": 0
+ }
+ ]
+ }
+ }
+ ],
+ "created": 1759437806,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 27,
+ "prompt_tokens": 172,
+ "total_tokens": 199,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/cd0ece88d392.json b/tests/integration/recordings/responses/cd0ece88d392.json
new file mode 100644
index 000000000..3e0f5cd14
--- /dev/null
+++ b/tests/integration/recordings/responses/cd0ece88d392.json
@@ -0,0 +1,258 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:55.86924Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "The",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:55.911521Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " boiling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:55.95324Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:55.996666Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " of",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.038076Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.079306Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.121626Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.162658Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " is",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.203804Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " -",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.245419Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "100",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.286364Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\u00b0C",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.327683Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ".",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:56.369528Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 708500166,
+ "load_duration": 138748458,
+ "prompt_eval_count": 392,
+ "prompt_eval_duration": 68099125,
+ "eval_count": 13,
+ "eval_duration": 500834417,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/cd294c2e0038.json b/tests/integration/recordings/responses/cd294c2e0038.json
index 985cfa1bb..944ccbf52 100644
--- a/tests/integration/recordings/responses/cd294c2e0038.json
+++ b/tests/integration/recordings/responses/cd294c2e0038.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-251",
+ "id": "chatcmpl-249",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282591,
+ "created": 1759373711,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/ce21235ebde2.json b/tests/integration/recordings/responses/ce21235ebde2.json
new file mode 100644
index 000000000..25518bca7
--- /dev/null
+++ b/tests/integration/recordings/responses/ce21235ebde2.json
@@ -0,0 +1,124 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point"
+ }
+ },
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "str",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "bool",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-993",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_mw57o9vn",
+ "function": {
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425519,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-993",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759425519,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/cf776b1aa432.json b/tests/integration/recordings/responses/cf776b1aa432.json
index 3b08967d5..844905a35 100644
--- a/tests/integration/recordings/responses/cf776b1aa432.json
+++ b/tests/integration/recordings/responses/cf776b1aa432.json
@@ -21,7 +21,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -36,7 +36,7 @@
"logprobs": null
}
],
- "created": 1759282661,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -47,7 +47,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -62,7 +62,7 @@
"logprobs": null
}
],
- "created": 1759282661,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -73,7 +73,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -88,7 +88,7 @@
"logprobs": null
}
],
- "created": 1759282661,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -99,7 +99,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -114,7 +114,7 @@
"logprobs": null
}
],
- "created": 1759282661,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -125,7 +125,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -140,7 +140,7 @@
"logprobs": null
}
],
- "created": 1759282661,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -151,7 +151,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -166,7 +166,7 @@
"logprobs": null
}
],
- "created": 1759282662,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -177,7 +177,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -192,7 +192,7 @@
"logprobs": null
}
],
- "created": 1759282662,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
@@ -203,7 +203,7 @@
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
- "id": "chatcmpl-615",
+ "id": "chatcmpl-883",
"choices": [
{
"delta": {
@@ -218,7 +218,7 @@
"logprobs": null
}
],
- "created": 1759282662,
+ "created": 1759437865,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/d7caf68e394e.json b/tests/integration/recordings/responses/d7caf68e394e.json
index 2347344c1..8bf2ef23e 100644
--- a/tests/integration/recordings/responses/d7caf68e394e.json
+++ b/tests/integration/recordings/responses/d7caf68e394e.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-480",
+ "id": "chatcmpl-953",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759282535,
+ "created": 1759373707,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/d9e8f66e1d85.json b/tests/integration/recordings/responses/d9e8f66e1d85.json
new file mode 100644
index 000000000..0dd6d2a17
--- /dev/null
+++ b/tests/integration/recordings/responses/d9e8f66e1d85.json
@@ -0,0 +1,117 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Book a flight from SFO to JFK for John Doe"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "book_flight",
+ "description": "Book a flight",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "flight": {
+ "$ref": "#/$defs/FlightInfo"
+ },
+ "passenger": {
+ "$ref": "#/$defs/Passenger"
+ }
+ },
+ "required": [
+ "flight",
+ "passenger"
+ ],
+ "$defs": {
+ "FlightInfo": {
+ "type": "object",
+ "properties": {
+ "from": {
+ "type": "string"
+ },
+ "to": {
+ "type": "string"
+ },
+ "date": {
+ "type": "string",
+ "format": "date"
+ }
+ }
+ },
+ "Passenger": {
+ "type": "object",
+ "properties": {
+ "name": {
+ "type": "string"
+ },
+ "age": {
+ "type": "integer"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-128",
+ "choices": [
+ {
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": [
+ {
+ "id": "call_bdq6iic2",
+ "function": {
+ "arguments": "{\"flight\":\"{\\\"date\\\":\\\"2023-08-20\\\",\\\"from\\\":\\\"SFO\\\",\\\"to\\\":\\\"JFK\\\"}\",\"passenger\":\"{\\\"age\\\":30,\\\"name\\\":\\\"John Doe\\\"}\"}",
+ "name": "book_flight"
+ },
+ "type": "function",
+ "index": 0
+ }
+ ]
+ }
+ }
+ ],
+ "created": 1759437805,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 52,
+ "prompt_tokens": 227,
+ "total_tokens": 279,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/df20f4b62da7.json b/tests/integration/recordings/responses/df20f4b62da7.json
new file mode 100644
index 000000000..9c22642d5
--- /dev/null
+++ b/tests/integration/recordings/responses/df20f4b62da7.json
@@ -0,0 +1,258 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant Always respond with tool calls no matter what. <|eot_id|><|start_header_id|>user<|end_header_id|>\n\nGet the boiling point of polyjuice with a tool call.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:58.856153Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "The",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:58.898198Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " boiling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:58.939822Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:58.981421Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " of",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.023342Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.065147Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.106081Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.147339Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " is",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.189027Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " -",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.230097Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "100",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.271249Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\u00b0C",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.312423Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ".",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:54:59.353748Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 699082625,
+ "load_duration": 131157125,
+ "prompt_eval_count": 400,
+ "prompt_eval_duration": 68858833,
+ "eval_count": 13,
+ "eval_duration": 498145250,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/e0c71820f395.json b/tests/integration/recordings/responses/e0c71820f395.json
new file mode 100644
index 000000000..191b107b2
--- /dev/null
+++ b/tests/integration/recordings/responses/e0c71820f395.json
@@ -0,0 +1,122 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "user",
+ "content": "Use one of the available tools"
+ }
+ ],
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "simple",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "x": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ },
+ {
+ "type": "function",
+ "function": {
+ "name": "complex",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "data": {
+ "$ref": "#/$defs/Complex"
+ }
+ },
+ "$defs": {
+ "Complex": {
+ "type": "object",
+ "properties": {
+ "nested": {
+ "type": "array",
+ "items": {
+ "type": "number"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ },
+ {
+ "type": "function",
+ "function": {
+ "name": "with_output",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "input": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.chat.chat_completion.ChatCompletion",
+ "__data__": {
+ "id": "chatcmpl-271",
+ "choices": [
+ {
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null,
+ "message": {
+ "content": "",
+ "refusal": null,
+ "role": "assistant",
+ "annotations": null,
+ "audio": null,
+ "function_call": null,
+ "tool_calls": [
+ {
+ "id": "call_vxiwiifd",
+ "function": {
+ "arguments": "{\"x\":\"\"}",
+ "name": "simple"
+ },
+ "type": "function",
+ "index": 0
+ }
+ ]
+ }
+ }
+ ],
+ "created": 1759437809,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": {
+ "completion_tokens": 15,
+ "prompt_tokens": 246,
+ "total_tokens": 261,
+ "completion_tokens_details": null,
+ "prompt_tokens_details": null
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/e1ccaa261725.json b/tests/integration/recordings/responses/e1ccaa261725.json
new file mode 100644
index 000000000..0128f924d
--- /dev/null
+++ b/tests/integration/recordings/responses/e1ccaa261725.json
@@ -0,0 +1,414 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_q48y3xup",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_q48y3xup",
+ "content": "-100"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "required",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427475,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427475,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427475,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427475,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": " Poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427475,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": "100",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": "\u00b0C",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-131",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759427476,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/e25ab43491af.json b/tests/integration/recordings/responses/e25ab43491af.json
index 9fb331942..686508102 100644
--- a/tests/integration/recordings/responses/e25ab43491af.json
+++ b/tests/integration/recordings/responses/e25ab43491af.json
@@ -20,7 +20,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-808",
+ "id": "chatcmpl-602",
"choices": [
{
"finish_reason": "stop",
@@ -37,7 +37,7 @@
}
}
],
- "created": 1759012142,
+ "created": 1759437854,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/e3b94833d349.json b/tests/integration/recordings/responses/e3b94833d349.json
new file mode 100644
index 000000000..71f3a99b4
--- /dev/null
+++ b/tests/integration/recordings/responses/e3b94833d349.json
@@ -0,0 +1,388 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_gefseirj",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point_with_metadata",
+ "arguments": "{\"celcius\":false,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_gefseirj",
+ "content": "-212"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point_with_metadata",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": "The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": " -",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": "212",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-509",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759441678,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/e59abd091d90.json b/tests/integration/recordings/responses/e59abd091d90.json
new file mode 100644
index 000000000..fd88e832e
--- /dev/null
+++ b/tests/integration/recordings/responses/e59abd091d90.json
@@ -0,0 +1,804 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of the liquid polyjuice in celsius?"
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_ew600lfr",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_ew600lfr",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "required",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " was",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " unable",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " to",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " find",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " of",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " liquid",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " Celsius",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " The",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " boiling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " could",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " not",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " be",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " located",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429348,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " in",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429349,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " my",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429349,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": " database",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429349,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429349,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-447",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759429349,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/e9c8a0e4f0e0.json b/tests/integration/recordings/responses/e9c8a0e4f0e0.json
index 87a208405..1bdf9e1f1 100644
--- a/tests/integration/recordings/responses/e9c8a0e4f0e0.json
+++ b/tests/integration/recordings/responses/e9c8a0e4f0e0.json
@@ -20,14 +20,14 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-957",
+ "id": "chatcmpl-380",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "Humans live on Earth. It's a terrestrial planet in the Solar System, located in the outer reaches of the Sun's gravitational pull.",
+ "content": "Humans have not yet established a permanent, self-sustaining presence on another planet. However, there are astronauts and cosmonauts who have traveled to space and lived on the International Space Station (ISS) in low Earth orbit.\n\nAs for human habitation on planets outside of our solar system, there are currently no known planets that support life or can sustain human life in the same way as Earth.\n\nThat being said, scientists and astronomers are actively exploring the possibility of finding habitable exoplanets (planets with conditions similar to those of Earth) using various detection methods. Some notable examples include:\n\n1. Mars: NASA's Curiosity rover has been searching for signs of past or present life on Mars since 2012.\n2. Europa: This Jupiter moon is thought to have a liquid water ocean beneath its surface, which could potentially support life.\n\nHowever, it's essential to note that humans have not yet established any permanent settlements or habitats on other planets or moons in our solar system.\n\nSo, for now, Earth remains the only planet known to support human life.",
"refusal": null,
"role": "assistant",
"annotations": null,
@@ -37,15 +37,15 @@
}
}
],
- "created": 1756921355,
+ "created": 1759437879,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 28,
+ "completion_tokens": 217,
"prompt_tokens": 32,
- "total_tokens": 60,
+ "total_tokens": 249,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/eeb26200786f.json b/tests/integration/recordings/responses/eeb26200786f.json
new file mode 100644
index 000000000..0bfe1b613
--- /dev/null
+++ b/tests/integration/recordings/responses/eeb26200786f.json
@@ -0,0 +1,1355 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"book_flight\",\n \"description\": \"\n Book a flight with passenger and payment information.\n\n This tool uses JSON Schema $ref and $defs for type reuse.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"flight\", \"passengers\", \"payment\"],\n \"properties\": {\n \"flight\": {\n \"type\": \"object\",\n \"description\": \"\"\n },\n \"passengers\": {\n \"type\": \"array\",\n \"description\": \"\"\n },\n \"payment\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"process_order\",\n \"description\": \"\n Process an order with nested address information.\n\n Uses nested objects and $ref.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"order_data\"],\n \"properties\": {\n \"order_data\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"flexible_contact\",\n \"description\": \"\n Accept flexible contact (email or phone).\n\n Uses anyOf schema.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"contact_info\"],\n \"properties\": {\n \"contact_info\": {\n \"type\": \"string\",\n \"description\": \"\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant that can process orders and book flights.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nProcess an order with 2 widgets going to 123 Main St, San Francisco<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[process_order(order_data={order_id=1, customer_name=\"John Doe\", address={street=\"123 Main St\", city=\"San Francisco\"}})]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n{\n \"order_id\": \"ORD789\",\n \"status\": \"processing\",\n \"data\": {\n \"order_id\": 1,\n \"customer_name\": \"John Doe\",\n \"address\": {\n \"street\": \"123 Main St\",\n \"city\": \"San Francisco\"\n }\n }\n}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.509066Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.551814Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "book",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.596704Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_flight",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.641302Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(f",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.683974Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "light",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.726757Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "={\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.769592Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "flight",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.811613Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_number",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.853673Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.896273Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.938557Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "AA",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:21.980765Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "101",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.022949Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.065012Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.10732Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "departure",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.149511Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.19172Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.234788Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "New",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.277472Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " York",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.321037Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.364313Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.407033Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "arrival",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.449572Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.492159Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.534652Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "Los",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.578509Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Angeles",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.625903Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.671828Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.71768Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "pass",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.765213Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "engers",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.811377Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.8582Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " [{\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.904666Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.950992Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:22.997067Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.042723Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "John",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.088476Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " Doe",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.135032Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.181489Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.227284Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "email",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.273828Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.320518Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.365466Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "j",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.410208Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "oh",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.455306Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "nd",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.500535Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "oe",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.54581Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "@example",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.591529Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ".com",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.638938Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\"}",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.683537Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "],",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.727957Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.771084Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "payment",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.81393Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.856746Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " {\"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.899213Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "method",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.941386Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:23.984154Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.028068Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "credit",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.070217Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_card",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.111913Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\",",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.153705Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.196172Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "card",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.240061Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_number",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.283763Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\":",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.325975Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " \"",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.368432Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "123",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.411036Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "456",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.45408Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "789",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.496458Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "012",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.538894Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "345",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.581294Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "6",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.624685Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\"}}",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.667599Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-01T23:00:24.709585Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 3497578917,
+ "load_duration": 104591083,
+ "prompt_eval_count": 664,
+ "prompt_eval_duration": 191187834,
+ "eval_count": 74,
+ "eval_duration": 3201095416,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/f22b7da7ad75.json b/tests/integration/recordings/responses/f22b7da7ad75.json
new file mode 100644
index 000000000..ef1ee8414
--- /dev/null
+++ b/tests/integration/recordings/responses/f22b7da7ad75.json
@@ -0,0 +1,1204 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/embeddings",
+ "headers": {},
+ "body": {
+ "model": "all-minilm:l6-v2",
+ "input": [
+ "First text for base64",
+ "Second text for base64",
+ "Third text for base64"
+ ],
+ "encoding_format": "base64"
+ },
+ "endpoint": "/v1/embeddings",
+ "model": "all-minilm:l6-v2"
+ },
+ "response": {
+ "body": {
+ "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
+ "__data__": {
+ "data": [
+ {
+ "embedding": [
+ 0.047443096,
+ 0.1030663,
+ -0.02994777,
+ -0.0019610971,
+ -0.0004458719,
+ 0.013241453,
+ -0.022299973,
+ 0.035796557,
+ 0.042697832,
+ -0.013352145,
+ 0.014903893,
+ 0.059454504,
+ -0.030757127,
+ -0.016443565,
+ 0.04413251,
+ -0.01962478,
+ -0.052001625,
+ 0.023652397,
+ 0.038157385,
+ -0.019067932,
+ 0.07790512,
+ 0.065477535,
+ 0.0063924147,
+ 0.01184581,
+ 0.008469548,
+ 0.055321243,
+ -0.08488264,
+ 0.1419959,
+ 0.07208884,
+ -0.052270085,
+ 0.04475413,
+ -0.043897122,
+ 0.19948907,
+ 0.0521248,
+ 0.039570127,
+ 0.047736328,
+ -0.0031801846,
+ -0.027514923,
+ 0.016915824,
+ 0.08785543,
+ 0.018578053,
+ -0.062505305,
+ 0.025584552,
+ 0.039979465,
+ 0.013755796,
+ -0.029615713,
+ 0.050677385,
+ 0.09286756,
+ 0.046862997,
+ -0.046437945,
+ 0.09620637,
+ -0.037828952,
+ -0.021523252,
+ 0.053332504,
+ 0.008366923,
+ 0.016525395,
+ -0.04379942,
+ 0.057431653,
+ -0.042829104,
+ 0.053737152,
+ -0.05284048,
+ -0.025113432,
+ 0.040097877,
+ -0.05878011,
+ 0.04986854,
+ -0.016612675,
+ 0.06288202,
+ -0.057936136,
+ 0.0014946498,
+ 0.011902256,
+ -0.02110201,
+ -0.045040447,
+ -0.028943324,
+ 0.112218715,
+ -0.04346062,
+ 0.02658561,
+ -0.08660781,
+ 0.0075222226,
+ 0.040901423,
+ -0.013788897,
+ -0.0034571695,
+ -0.105320804,
+ 0.13145688,
+ 0.1387978,
+ -0.024207905,
+ 0.00011780889,
+ 0.0027130456,
+ 0.08893496,
+ -0.0404282,
+ -0.013090902,
+ -0.042802725,
+ -0.019277347,
+ -0.0072423737,
+ -0.012584974,
+ -0.0758852,
+ 0.042088367,
+ -0.028754171,
+ -0.046412025,
+ -0.08769414,
+ 0.011706997,
+ 0.033290867,
+ -0.047082063,
+ 0.036054734,
+ 0.02562872,
+ -0.064266376,
+ -0.041589364,
+ 0.022733012,
+ 0.03523196,
+ -0.030952249,
+ -0.030285591,
+ -0.030893793,
+ -0.014268825,
+ -0.064496316,
+ -0.029686624,
+ -0.037651353,
+ -0.07263676,
+ -0.05136519,
+ 0.01860713,
+ 0.015172685,
+ 0.0192144,
+ -0.0116023095,
+ -0.012719093,
+ -0.029429333,
+ 0.032753803,
+ -0.10127056,
+ -0.08305989,
+ 0.07203204,
+ -1.6656048e-33,
+ -0.003488058,
+ 0.0655988,
+ -0.007163306,
+ 0.038025133,
+ -0.042687092,
+ -0.008737161,
+ -0.037520815,
+ 0.038469143,
+ -0.120509155,
+ 0.03023451,
+ -0.026864765,
+ -0.06805885,
+ 0.05592863,
+ -0.07489512,
+ -0.017807316,
+ -0.049285922,
+ -0.08905791,
+ 0.011731217,
+ 0.017883036,
+ 0.00015935759,
+ -0.030456739,
+ 0.024376402,
+ -0.027947344,
+ -0.049716905,
+ 0.014850297,
+ -0.0068702376,
+ -0.037318625,
+ -0.050714917,
+ 0.03216811,
+ -0.03513996,
+ -0.040848706,
+ 0.0031008294,
+ -0.06374552,
+ -0.07015488,
+ 0.040950127,
+ -0.031313762,
+ 0.06336745,
+ 0.015497221,
+ -0.08470297,
+ 0.034139305,
+ 0.047749784,
+ 0.063429475,
+ 0.08305951,
+ -0.031543955,
+ -0.02092045,
+ 0.024276698,
+ -0.050816093,
+ -0.00951583,
+ 0.11460215,
+ -0.011085907,
+ 0.0006970512,
+ 0.08304137,
+ -0.018151749,
+ 0.012668774,
+ 0.023483729,
+ -0.068380035,
+ 0.008017319,
+ 0.005103147,
+ -0.033619083,
+ -0.045522273,
+ -0.007610588,
+ -0.0031189255,
+ 0.02023118,
+ 0.048001137,
+ 0.018279912,
+ -0.06083473,
+ 0.0025614651,
+ -0.051604036,
+ -0.0712584,
+ 0.0049647917,
+ -0.056144852,
+ -0.03460778,
+ 0.084107466,
+ -0.051244184,
+ -0.07208066,
+ 0.082872786,
+ -0.042616084,
+ 0.032226164,
+ 0.038903847,
+ -0.043644667,
+ 0.03114516,
+ -0.037657745,
+ -0.0051392126,
+ -0.0399705,
+ -0.01362006,
+ 0.062149994,
+ 0.009436811,
+ -0.10927611,
+ 0.0054878076,
+ 0.035581235,
+ 0.06060475,
+ -0.051899396,
+ 0.013453982,
+ -0.02607209,
+ 0.03149,
+ 5.778151e-34,
+ 0.04866742,
+ -0.026154209,
+ 0.028786905,
+ -0.009705908,
+ 0.036763143,
+ 0.07683042,
+ 0.124761656,
+ 0.02430845,
+ -0.0055978484,
+ -0.011855667,
+ 0.08782188,
+ 0.03667143,
+ -0.01590326,
+ -0.005430289,
+ 0.026028333,
+ -0.047321074,
+ -0.0042727133,
+ 0.026540313,
+ 0.0465339,
+ -0.042490445,
+ -0.015054837,
+ -0.032038923,
+ -0.10492689,
+ 0.10122033,
+ 0.07957377,
+ 0.042453364,
+ 0.011124516,
+ 0.010934764,
+ 0.045186315,
+ -0.02283475,
+ -0.06222954,
+ 0.04523413,
+ 0.048799627,
+ 0.060591288,
+ -0.048021708,
+ -0.03465323,
+ -0.045096762,
+ 0.017476292,
+ 0.036111128,
+ 0.05623506,
+ 0.062889755,
+ -0.07529307,
+ -0.065171525,
+ 0.0069152173,
+ 0.05907177,
+ -0.0603988,
+ 0.045391977,
+ 0.03989815,
+ 0.017313296,
+ -0.010879031,
+ 0.014901746,
+ 0.05576297,
+ -0.064136796,
+ -0.05788592,
+ 0.049781807,
+ -0.04160058,
+ -0.116747804,
+ 0.037745718,
+ 0.0020103676,
+ -0.01814592,
+ 0.013506867,
+ 0.00341396,
+ 0.014206663,
+ -0.009217883,
+ -0.011821457,
+ -0.033057805,
+ -0.051591158,
+ 0.031610493,
+ -0.07041633,
+ 0.007702183,
+ -0.009296349,
+ -0.058487307,
+ -0.01271879,
+ 0.043650433,
+ 0.017939351,
+ -0.034527123,
+ 0.037774917,
+ 0.0450543,
+ -0.03789838,
+ 0.0016587796,
+ -0.017690128,
+ 0.046084408,
+ -0.10634635,
+ 0.058015924,
+ 0.09367202,
+ -0.03887253,
+ -0.030778354,
+ -0.04526167,
+ -0.042162772,
+ -0.019281171,
+ -0.094072275,
+ 0.08443694,
+ 0.04598175,
+ 0.11420337,
+ -0.016542073,
+ -1.3092824e-08,
+ 0.01029157,
+ -0.05607101,
+ -0.053273894,
+ 0.04327644,
+ -0.012097581,
+ 0.075499125,
+ 8.911722e-05,
+ -0.059431333,
+ -0.039473776,
+ -0.12459489,
+ -0.01031571,
+ 0.01610335,
+ 0.016960384,
+ -0.07947821,
+ 0.01820896,
+ 0.040425852,
+ 0.0060324515,
+ -0.13502608,
+ 0.016641272,
+ -0.020874891,
+ 0.021407917,
+ 0.030175129,
+ -0.045509353,
+ -0.10665387,
+ -0.071301624,
+ 0.027237656,
+ -0.0072193583,
+ 0.120991066,
+ -0.008656499,
+ 0.0011201953,
+ 0.0039784242,
+ 0.0341344,
+ -0.06401818,
+ -0.036852792,
+ 0.035282534,
+ -0.011923041,
+ 0.067173794,
+ 0.014300814,
+ 0.06770646,
+ -0.066512346,
+ 0.085266545,
+ -0.037755802,
+ -0.094363555,
+ -0.0124826655,
+ -0.014590712,
+ 0.026925279,
+ 0.04410473,
+ 0.015496688,
+ 0.004318949,
+ -0.031916477,
+ 0.017218966,
+ 0.016201599,
+ -0.033119682,
+ 0.06837974,
+ -0.02781091,
+ -0.01779888,
+ 0.057812553,
+ -0.016622763,
+ -0.0718051,
+ 0.07917062,
+ 0.027705258,
+ -0.0024773679,
+ 0.11784412,
+ -0.02393799
+ ],
+ "index": 0,
+ "object": "embedding"
+ },
+ {
+ "embedding": [
+ 0.04654041,
+ 0.100457005,
+ -0.03960695,
+ 0.0054190895,
+ -0.00061261636,
+ 0.022978926,
+ -0.015349646,
+ 0.05174952,
+ 0.04080002,
+ -0.040600445,
+ 0.02253602,
+ 0.024573963,
+ -0.0061854525,
+ -0.024768595,
+ 0.097017914,
+ 0.0037721908,
+ -0.1071271,
+ 0.05670194,
+ 0.021320485,
+ -0.023483735,
+ 0.10240627,
+ 0.046724126,
+ 0.014405091,
+ 0.017862096,
+ 0.0076312926,
+ 0.084439315,
+ -0.08968022,
+ 0.16757359,
+ 0.046978492,
+ -0.029951245,
+ 0.07417616,
+ 0.00019549856,
+ 0.118695736,
+ 0.026067322,
+ 0.035530325,
+ 0.0063190986,
+ -0.016918957,
+ -0.011904382,
+ 0.02159433,
+ 0.04011584,
+ 0.020048723,
+ -0.053142868,
+ 0.022441626,
+ 0.016903853,
+ -0.023708675,
+ -0.02648895,
+ 0.019766012,
+ 0.062821016,
+ 0.04764414,
+ -0.052348837,
+ 0.07352589,
+ -0.06325153,
+ -0.0331663,
+ 0.04175679,
+ 0.0015468705,
+ 0.05215102,
+ -0.04930485,
+ 0.05475271,
+ -0.037362292,
+ 0.048984047,
+ 0.00668616,
+ 0.0077575357,
+ 0.033763032,
+ -0.045534473,
+ 0.04478127,
+ -0.041897986,
+ 0.058399495,
+ -0.053956937,
+ -0.066097215,
+ 0.006726588,
+ 0.0038363277,
+ -0.03608817,
+ 0.008571994,
+ 0.07390713,
+ 0.006064092,
+ 0.0057486463,
+ -0.08874643,
+ -0.0021642765,
+ 0.045340028,
+ -0.051646378,
+ 0.0056842417,
+ -0.10331014,
+ 0.120456606,
+ 0.12761793,
+ -0.024176907,
+ -0.05479328,
+ 0.0034843183,
+ 0.07641806,
+ -0.059855074,
+ -0.0195081,
+ -0.0150292525,
+ -0.00992928,
+ 0.045797862,
+ -0.015174619,
+ -0.07924758,
+ 0.023096986,
+ -0.040744357,
+ -0.0101818275,
+ -0.08914291,
+ 0.013643887,
+ 0.011581099,
+ -0.049888827,
+ -0.00021994562,
+ -0.02913472,
+ -0.029171223,
+ -0.04352264,
+ 0.0076333424,
+ 0.012210982,
+ 0.016095871,
+ -0.06401206,
+ 0.0016354738,
+ 0.028166138,
+ -0.07800048,
+ -0.013365193,
+ -0.0013295119,
+ -0.019354483,
+ -0.0043497235,
+ 0.025218496,
+ 0.033494957,
+ 0.007653746,
+ -0.033507217,
+ -0.03213291,
+ -0.022418406,
+ 0.0067284796,
+ -0.08024248,
+ -0.12522098,
+ 0.069272675,
+ -1.9683093e-33,
+ -0.012249598,
+ 0.070073105,
+ -0.016373688,
+ 0.03268669,
+ -0.0011716175,
+ 0.008970948,
+ -0.05875696,
+ 0.031790286,
+ -0.09962546,
+ -0.011529516,
+ -0.042214815,
+ -0.08385974,
+ 0.050325025,
+ -0.058266874,
+ -0.01614801,
+ -0.07460485,
+ -0.056625802,
+ 0.049216725,
+ 0.09685523,
+ 0.02972927,
+ -0.010797609,
+ 0.096737646,
+ -0.008734601,
+ -0.024298675,
+ 0.054711536,
+ 0.020422578,
+ -0.0040869303,
+ -0.041413024,
+ 0.039046016,
+ -0.027355552,
+ 0.022152912,
+ 0.015635848,
+ -0.040486902,
+ -0.046137046,
+ 0.067116976,
+ -0.050166503,
+ 0.05231306,
+ 0.03977189,
+ -0.08200705,
+ 0.04208007,
+ 0.06871361,
+ 0.0415384,
+ 0.08255112,
+ -0.019878006,
+ 0.009672142,
+ -0.0013818855,
+ -0.02187854,
+ -0.03571946,
+ 0.1019913,
+ -0.040465977,
+ 0.0029030787,
+ 0.071231104,
+ -0.018016066,
+ 0.022290476,
+ 0.053263694,
+ -0.05915711,
+ -0.024596125,
+ 0.042284742,
+ 0.0125378035,
+ -0.026088756,
+ -0.007868452,
+ 0.018145658,
+ 0.025348024,
+ 0.048246585,
+ 0.032595333,
+ -0.04322502,
+ -0.024803862,
+ -0.070749104,
+ -0.07416428,
+ 0.0484724,
+ -0.05546208,
+ -0.041756414,
+ 0.12654942,
+ -0.04357299,
+ -0.08900543,
+ 0.016302116,
+ -0.040754095,
+ 0.024944471,
+ 0.041844428,
+ -0.06273068,
+ 0.0006748941,
+ -0.05448637,
+ -0.013658018,
+ -0.03356399,
+ -0.0060005034,
+ 0.05786807,
+ -0.030056076,
+ -0.12787268,
+ -0.027650442,
+ 0.083788656,
+ 0.021819875,
+ -0.040701445,
+ -0.041838806,
+ -0.047018126,
+ 0.08002261,
+ 4.734239e-34,
+ 0.02015769,
+ -0.00014442818,
+ 0.0072734207,
+ -0.01035945,
+ 0.0436576,
+ 0.060642734,
+ 0.1473969,
+ -0.023643956,
+ -0.018900618,
+ -0.026930645,
+ 0.054844704,
+ 0.029314412,
+ 0.016708935,
+ -0.009290097,
+ -0.002891506,
+ -0.057237446,
+ -0.0032285063,
+ 0.05497127,
+ 0.048353076,
+ -0.067556486,
+ -0.02002941,
+ -0.013762125,
+ -0.060434237,
+ 0.075815536,
+ 0.092324585,
+ 0.021875912,
+ -0.028627641,
+ 0.02281807,
+ 0.04816562,
+ -0.029499082,
+ -0.07594795,
+ 0.028744346,
+ 0.045300674,
+ 0.061325517,
+ -0.017799513,
+ -0.06497018,
+ -0.043381255,
+ -0.012436013,
+ -0.017595029,
+ 0.038607694,
+ 0.03692832,
+ -0.06317727,
+ -0.03189631,
+ 0.0163061,
+ 0.066662505,
+ -0.01747777,
+ 0.0455436,
+ 0.032373946,
+ 0.019391501,
+ -0.029496003,
+ 0.026255092,
+ -0.003917891,
+ -0.12487856,
+ -0.012247588,
+ 0.015688721,
+ -0.044113353,
+ -0.11468337,
+ 0.040689792,
+ 0.031688645,
+ -0.027883623,
+ 0.03565975,
+ -0.029930554,
+ 0.0272684,
+ -0.0078877555,
+ 0.026264768,
+ -0.06124056,
+ -0.06071735,
+ 0.009353228,
+ -0.09204558,
+ 0.05202069,
+ -0.042713076,
+ -0.07342886,
+ 0.004044382,
+ 0.06092453,
+ -0.003994553,
+ -0.025158737,
+ 0.02733044,
+ 0.032295305,
+ -0.03984234,
+ 0.017935337,
+ -0.028768739,
+ 0.01554963,
+ -0.073981866,
+ 0.0739418,
+ 0.04965046,
+ -0.04301918,
+ -0.035159755,
+ 0.027055329,
+ -0.03693953,
+ -0.036715843,
+ -0.06353325,
+ 0.12646905,
+ -0.003499326,
+ 0.093309924,
+ 0.00889324,
+ -1.38464875e-08,
+ 0.0231563,
+ -0.075574,
+ -0.040843725,
+ 0.0071973656,
+ -0.032683276,
+ 0.025759073,
+ -0.039060622,
+ -0.070802435,
+ -0.026421575,
+ -0.12223953,
+ -0.01567019,
+ 0.008273527,
+ 0.021523712,
+ -0.077978514,
+ 0.008511451,
+ 0.038049843,
+ 0.013643623,
+ -0.12606904,
+ 0.024690265,
+ -0.049368616,
+ 0.022910642,
+ 0.012570536,
+ -0.038921557,
+ -0.0539728,
+ -0.11401533,
+ 0.0717154,
+ -0.02019053,
+ 0.09689256,
+ -0.03522339,
+ -0.01902355,
+ 0.052379142,
+ 0.015264651,
+ -0.059212603,
+ -0.029434869,
+ 0.040918592,
+ -0.050510794,
+ 0.07031127,
+ 0.010864601,
+ 0.08412114,
+ -0.034533564,
+ 0.10262946,
+ -0.060668074,
+ -0.121650845,
+ 0.033533875,
+ 0.064201616,
+ 0.021554638,
+ 0.059297472,
+ -0.009686148,
+ -0.0021906071,
+ -0.013715586,
+ 0.050112963,
+ -0.014887802,
+ -0.010682921,
+ 0.07304227,
+ -0.034087624,
+ -0.024696104,
+ 0.0442271,
+ -0.00089669036,
+ -0.08143203,
+ 0.06717475,
+ 0.03451422,
+ -0.0024682316,
+ 0.09635781,
+ -0.04145595
+ ],
+ "index": 1,
+ "object": "embedding"
+ },
+ {
+ "embedding": [
+ 0.045375798,
+ 0.07258055,
+ -0.08003706,
+ -0.032656744,
+ 0.0139935585,
+ 0.017206425,
+ -0.0085616745,
+ 0.019218331,
+ 0.0527245,
+ -0.017329019,
+ 0.020587556,
+ 0.011539302,
+ -0.02006116,
+ -0.0116708,
+ 0.116046146,
+ -0.010887594,
+ -0.112962514,
+ 0.07470017,
+ -0.008835863,
+ -0.038513727,
+ 0.1079511,
+ 0.05575882,
+ 0.05465468,
+ 0.028420603,
+ 0.012869476,
+ 0.078700624,
+ -0.07481292,
+ 0.10657601,
+ 0.048312515,
+ -0.019187614,
+ 0.043496132,
+ -0.014120566,
+ 0.16143475,
+ -0.006972843,
+ 0.059548676,
+ -0.002742684,
+ -0.06421385,
+ -0.03753407,
+ -0.00034186858,
+ 0.103141606,
+ 0.021242032,
+ -0.035123263,
+ 0.039595246,
+ 0.03465166,
+ -0.007700848,
+ -0.016779039,
+ -0.017973451,
+ 0.03797483,
+ 0.06914695,
+ -0.06505097,
+ 0.0768558,
+ -0.063415445,
+ -0.047812812,
+ 0.081876844,
+ -0.03468853,
+ -0.010242799,
+ -0.04682619,
+ 0.05593955,
+ -0.037297264,
+ 0.048033547,
+ 0.0084374575,
+ 0.013531666,
+ 0.03961178,
+ -0.06994999,
+ 0.07862166,
+ -0.014270066,
+ 0.022243122,
+ -0.08205504,
+ -0.06690809,
+ 0.016866608,
+ -0.005296731,
+ -0.039822105,
+ -0.026300494,
+ 0.06192888,
+ 0.003208919,
+ 0.038568772,
+ -0.03837477,
+ -0.0075851064,
+ 0.019920006,
+ -0.056322522,
+ -0.0022795193,
+ -0.08178385,
+ 0.13542512,
+ 0.18784039,
+ -0.016274614,
+ -0.053139277,
+ -0.032727182,
+ 0.06850126,
+ -0.07511497,
+ 0.02570966,
+ -0.03359296,
+ -0.0060070264,
+ -0.0014385056,
+ -0.0030237471,
+ -0.07544867,
+ 0.05513981,
+ -0.015720192,
+ -0.05642966,
+ -0.08506004,
+ 0.02179422,
+ 0.038471166,
+ -0.0283351,
+ 0.015446086,
+ -0.023619834,
+ -0.029330725,
+ 0.010942997,
+ -0.0015495635,
+ 0.04477932,
+ -0.038915448,
+ -0.044640813,
+ -0.035229694,
+ -0.017752215,
+ -0.08401524,
+ -0.044855777,
+ -0.02621097,
+ -0.029825464,
+ -0.008823935,
+ -0.019113153,
+ 0.06113879,
+ 0.017369257,
+ -0.018114269,
+ -0.017956765,
+ -0.0055642324,
+ -0.0022192416,
+ -0.074853644,
+ -0.098001055,
+ 0.08262387,
+ -1.7699036e-33,
+ -0.03260984,
+ 0.088475876,
+ -0.02405542,
+ 0.043462854,
+ -0.008397535,
+ 0.020519359,
+ -0.049513564,
+ 0.018314049,
+ -0.11363644,
+ -0.0017021305,
+ -0.046051882,
+ -0.07227338,
+ 0.062427472,
+ -0.063298784,
+ -0.0043539773,
+ -0.07343966,
+ -0.08858381,
+ 0.04477799,
+ 0.04930878,
+ 0.034854405,
+ 0.007476164,
+ 0.046887144,
+ -0.03770322,
+ -0.025251219,
+ 0.0446619,
+ 0.03149236,
+ -0.0053032744,
+ -0.032395095,
+ 0.050810106,
+ -0.037147496,
+ 0.053301577,
+ 0.021033086,
+ -0.031951237,
+ -0.07252799,
+ 0.052170422,
+ -0.02576369,
+ 0.026887013,
+ 0.01079958,
+ -0.073781185,
+ 0.07478704,
+ 0.05142738,
+ 0.013788507,
+ 0.09066831,
+ -0.011272152,
+ 0.012055797,
+ 0.05094217,
+ 0.01781682,
+ -0.04303251,
+ 0.10018772,
+ -0.009778261,
+ 0.031500068,
+ 0.08470662,
+ 0.006889941,
+ 0.0029960799,
+ 0.052113816,
+ -0.07264866,
+ -0.028845811,
+ 0.05798962,
+ 0.026194785,
+ -0.053314455,
+ -0.013308107,
+ -0.005074615,
+ 0.039697673,
+ 0.05761601,
+ 0.018443743,
+ -0.024383908,
+ -0.04246694,
+ -0.057976462,
+ -0.045537386,
+ 0.038462877,
+ -0.06458701,
+ -0.021180486,
+ 0.10092568,
+ -0.0217069,
+ -0.09957015,
+ 0.023281459,
+ -0.06976486,
+ 0.03478707,
+ 0.021886345,
+ -0.07436989,
+ 0.0059652724,
+ -0.045952816,
+ 0.011156351,
+ -0.0023965703,
+ -0.020232527,
+ 0.051849972,
+ -0.016511427,
+ -0.14282945,
+ 0.0007839438,
+ 0.05143813,
+ 0.045633797,
+ -0.047449116,
+ -0.031150315,
+ -0.028784428,
+ 0.022110209,
+ 8.540206e-34,
+ 0.035680003,
+ -0.004454516,
+ 0.0019904706,
+ -0.03159778,
+ 0.039594337,
+ 0.055580996,
+ 0.11990417,
+ 0.007444201,
+ 0.0014800398,
+ -0.035671443,
+ 0.054802123,
+ 0.013518193,
+ 0.015369701,
+ -0.042170182,
+ 0.00910241,
+ -0.03393552,
+ -0.011560881,
+ 0.008206326,
+ 0.03244244,
+ -0.057579078,
+ 0.001215648,
+ -0.037337195,
+ -0.09628385,
+ 0.10470648,
+ 0.073387526,
+ 0.034718595,
+ -0.031235449,
+ -0.008077066,
+ 0.0532558,
+ -0.007544639,
+ -0.06481378,
+ 0.0078824125,
+ 0.059332505,
+ 0.07509864,
+ -0.023143422,
+ -0.053352714,
+ -0.0049984492,
+ 0.020093009,
+ 0.005558518,
+ 0.02055946,
+ 0.040190052,
+ -0.058405206,
+ -0.019410733,
+ 0.040003065,
+ 0.043201532,
+ 0.0153706325,
+ 0.038072105,
+ 0.044809878,
+ 0.03211562,
+ 0.02581734,
+ 0.016989984,
+ -0.031887848,
+ -0.072636016,
+ -0.008867823,
+ 0.043845262,
+ -0.032801606,
+ -0.10555597,
+ -0.008874612,
+ 0.037949465,
+ -0.008839974,
+ 0.0024741436,
+ -0.005779733,
+ 0.06775476,
+ -0.016673656,
+ 0.020682104,
+ -0.02387207,
+ -0.08558911,
+ 0.008887117,
+ -0.07502815,
+ 0.034403294,
+ -0.04082733,
+ -0.06821772,
+ -0.018959502,
+ 0.03903044,
+ 0.011770784,
+ -0.042644627,
+ 0.021807244,
+ 0.069912925,
+ -0.027863,
+ 0.021612082,
+ -0.017177302,
+ 0.013199131,
+ -0.06342314,
+ 0.11476938,
+ 0.055228394,
+ -0.057914026,
+ -0.018466951,
+ 0.029547459,
+ -0.025892112,
+ -0.061446555,
+ -0.051833864,
+ 0.12864126,
+ 0.013783986,
+ 0.10842094,
+ 0.025589032,
+ -1.3291747e-08,
+ 0.04438634,
+ -0.035043437,
+ -0.059084963,
+ 0.007846919,
+ -0.03533786,
+ 0.04078865,
+ -0.0045822817,
+ -0.044390634,
+ -0.017847955,
+ -0.11152658,
+ 0.019488214,
+ -0.04202167,
+ -0.010433255,
+ -0.09392986,
+ 0.031165348,
+ 0.0037942217,
+ 0.011776091,
+ -0.11188344,
+ 0.019489327,
+ -0.059643954,
+ 5.5016415e-05,
+ 0.023693599,
+ -0.03426268,
+ -0.067298956,
+ -0.05988965,
+ 0.09677909,
+ -0.026113264,
+ 0.11115747,
+ -0.032836337,
+ -0.002883786,
+ 0.048552252,
+ 0.027802175,
+ -0.06964344,
+ -0.024443185,
+ 0.01612565,
+ -0.020989701,
+ 0.062907666,
+ -0.00074260257,
+ 0.067105986,
+ -0.040433157,
+ 0.077970855,
+ -0.04189095,
+ -0.1258856,
+ 0.0058066114,
+ 0.03658347,
+ -0.015551063,
+ 0.021594083,
+ -0.008647476,
+ -0.026618915,
+ -0.04521969,
+ 0.02759545,
+ -0.02447648,
+ -0.016449116,
+ 0.1025887,
+ -0.016808366,
+ -0.04455479,
+ 0.023937078,
+ -0.017120138,
+ -0.07922125,
+ 0.062927626,
+ 0.038930148,
+ -0.018900929,
+ 0.09125473,
+ -0.017347038
+ ],
+ "index": 2,
+ "object": "embedding"
+ }
+ ],
+ "model": "all-minilm:l6-v2",
+ "object": "list",
+ "usage": {
+ "prompt_tokens": 15,
+ "total_tokens": 15
+ }
+ }
+ },
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/recordings/responses/f23defea82ec.json b/tests/integration/recordings/responses/f23defea82ec.json
index 1e964af04..5d37b2524 100644
--- a/tests/integration/recordings/responses/f23defea82ec.json
+++ b/tests/integration/recordings/responses/f23defea82ec.json
@@ -19,22 +19,390 @@
"data": [
{
"embedding": [
- 0.253706,
- 0.016367152,
- -0.29664654,
- 0.31654558,
- -0.18624601,
- 0.07602756,
- -0.031531323,
- 0.2986085,
- -0.49672848,
- -0.36617878,
- 0.25328273,
- -0.33349335,
- 0.0060151755,
- 0.14081024,
- -0.13757885,
- -0.14679416
+ 0.04635219,
+ 0.002988263,
+ -0.054220885,
+ 0.057812735,
+ -0.0340614,
+ 0.013923248,
+ -0.005755826,
+ 0.054555666,
+ -0.09073176,
+ -0.066910096,
+ 0.046287432,
+ -0.060912322,
+ 0.0010950539,
+ 0.025724398,
+ -0.025169374,
+ -0.026821515,
+ -0.030190151,
+ 0.0019341545,
+ -0.0754819,
+ 0.057380512,
+ 0.020332545,
+ -0.005591279,
+ -0.0022273492,
+ 0.012063173,
+ -0.011033521,
+ -0.03300947,
+ 0.05462081,
+ 0.014426073,
+ 0.024025004,
+ 0.004224287,
+ 0.09837723,
+ 0.08385713,
+ -0.049175426,
+ 0.03877149,
+ 0.08748876,
+ -0.0223024,
+ 0.006552746,
+ -0.0070359865,
+ 0.017893821,
+ 0.015465863,
+ 0.05007282,
+ -0.019349905,
+ 0.064887345,
+ 0.03184605,
+ 0.0034936152,
+ 0.02317752,
+ -0.06297051,
+ 0.044468515,
+ -0.022246253,
+ -0.017976552,
+ 0.040390052,
+ -0.0020998395,
+ -0.05173264,
+ 0.014722753,
+ 0.01640469,
+ -0.06438627,
+ -0.043313596,
+ -0.040564552,
+ 0.044412937,
+ -0.0031199565,
+ -0.007237415,
+ -0.05158015,
+ 0.059660934,
+ -0.014839656,
+ 0.012902056,
+ 0.028181136,
+ -0.019578207,
+ -0.0664231,
+ -0.06333673,
+ 0.028995825,
+ -0.114707075,
+ 0.041575413,
+ -0.022128351,
+ 0.01979776,
+ 0.0630018,
+ 0.011822141,
+ -0.06492722,
+ -0.066328146,
+ 0.021114407,
+ -0.020638306,
+ -0.009599678,
+ 0.013701863,
+ -0.060742326,
+ 0.005395315,
+ 0.026589092,
+ 0.11719033,
+ 0.067120634,
+ 0.008300158,
+ 0.036319703,
+ 0.00772981,
+ 0.071582936,
+ 0.019818509,
+ -0.15945566,
+ 0.047943458,
+ 0.00031571978,
+ -0.04666597,
+ 0.007148715,
+ -0.08839544,
+ 0.038042437,
+ 0.06620088,
+ 0.034336157,
+ -0.035366412,
+ 0.041598067,
+ 0.073756054,
+ -0.018818064,
+ -0.017260034,
+ 0.058635473,
+ -0.01371376,
+ 0.048319146,
+ -0.023727186,
+ 0.024134034,
+ 0.015763162,
+ 0.06681245,
+ 0.01748244,
+ 0.0825409,
+ -0.044568237,
+ 0.0015441044,
+ -0.011225885,
+ 0.0153481,
+ -0.061364066,
+ 0.05792184,
+ 0.044216745,
+ -0.047036964,
+ -0.02634555,
+ -0.033504363,
+ 0.06713578,
+ 0.030866034,
+ 2.024336e-34,
+ -0.03532978,
+ 0.021929236,
+ 0.030160688,
+ 0.09271786,
+ -0.010355268,
+ 0.07196569,
+ 0.052604284,
+ 0.085753724,
+ 0.094942175,
+ 0.053786535,
+ -0.08900509,
+ -0.024382822,
+ -0.008744401,
+ -0.03167582,
+ 0.01025236,
+ 0.1818434,
+ -0.0022662894,
+ 0.118558116,
+ -0.072208576,
+ -0.005867667,
+ 0.0746222,
+ -0.024001855,
+ -0.013938801,
+ -0.030681474,
+ -0.029207803,
+ -0.117624186,
+ -0.046466038,
+ -0.002622228,
+ -0.0902171,
+ -0.038626853,
+ -0.037497964,
+ -0.02418436,
+ -0.069297835,
+ 0.06424038,
+ 0.0045628003,
+ -0.0041498984,
+ -0.01649947,
+ 0.051125433,
+ -0.0058985935,
+ -0.0122523345,
+ -0.047424458,
+ -0.007806876,
+ 0.07906618,
+ 0.03244041,
+ -0.044682544,
+ -0.022625683,
+ 0.028852794,
+ -0.050480433,
+ 0.043801326,
+ -0.023512814,
+ -0.029832385,
+ 0.031089257,
+ 0.07129686,
+ -0.089649536,
+ 0.011963804,
+ -0.018448317,
+ 0.019637493,
+ 0.020081993,
+ 0.0012980831,
+ 0.093201645,
+ -0.064436235,
+ -0.040581323,
+ -0.01193043,
+ 0.043884862,
+ -0.010675756,
+ -0.030739127,
+ 0.005605308,
+ -0.110498495,
+ 0.044510514,
+ 0.037110664,
+ 0.04116233,
+ -0.039460793,
+ -0.04470639,
+ -0.027589805,
+ -0.02073358,
+ -0.067221105,
+ 0.050390884,
+ 0.031397663,
+ -0.008031462,
+ -0.009285899,
+ 0.0013141648,
+ -0.017254544,
+ 0.010367782,
+ -0.05940024,
+ -0.018042587,
+ -0.15487815,
+ 0.0069424273,
+ -0.05208202,
+ 0.0014201442,
+ -0.13956298,
+ -0.040203292,
+ 0.027910054,
+ -0.064872995,
+ -0.016270144,
+ 0.07052549,
+ 5.3188943e-34,
+ 0.012666737,
+ 0.016728623,
+ -0.013163009,
+ 0.06391275,
+ -0.043404065,
+ 0.015435096,
+ 0.03720438,
+ 0.05997576,
+ -0.07789181,
+ -0.0408386,
+ 0.024137221,
+ -0.019834999,
+ -0.034739267,
+ 0.00042199617,
+ 0.048484907,
+ 0.08716056,
+ -0.101133205,
+ -0.07535088,
+ -0.03912376,
+ -0.031597532,
+ -0.052266575,
+ 0.022085808,
+ -0.011040282,
+ 0.005077135,
+ -0.088432744,
+ -0.010477913,
+ 0.047780182,
+ -0.073345095,
+ 0.014382301,
+ 0.038075384,
+ 0.02176859,
+ -0.029071847,
+ -0.036925532,
+ 0.14317243,
+ 0.020646103,
+ -0.08367964,
+ 0.111576855,
+ -0.009943396,
+ 0.023071144,
+ 0.0926832,
+ 0.011242715,
+ 0.068017475,
+ -0.007714686,
+ 0.03060742,
+ -0.011360289,
+ 0.109015204,
+ 0.12930514,
+ -0.07566831,
+ 0.09001269,
+ -0.0090979,
+ 0.0148039665,
+ 0.048663232,
+ 0.08894293,
+ 0.038565516,
+ 0.005821986,
+ 0.016084671,
+ -0.106283545,
+ -0.033372246,
+ 0.05440088,
+ -0.005663873,
+ 0.0011572369,
+ -0.024969472,
+ 0.043092247,
+ -0.009314855,
+ -0.11836073,
+ -0.027310666,
+ 0.009811885,
+ -0.0052975323,
+ -0.044883158,
+ 0.066436425,
+ -0.06750139,
+ -0.02696421,
+ 0.01402391,
+ -0.04950559,
+ -0.084093384,
+ -0.07380851,
+ 0.04709705,
+ 4.9404687e-05,
+ 0.01672617,
+ 0.01849747,
+ 0.027683195,
+ 0.0047972985,
+ 0.0017495222,
+ 0.07066204,
+ -0.022430636,
+ 0.06875498,
+ 0.093927115,
+ 0.11101308,
+ -0.015589739,
+ 0.021178465,
+ 0.033638563,
+ 0.034676168,
+ -0.026882911,
+ -0.010514364,
+ 0.0073013064,
+ -1.2070348e-08,
+ -0.10034882,
+ -0.028641108,
+ -0.061462097,
+ -0.009792086,
+ -0.081652306,
+ -0.011814046,
+ 0.002039501,
+ 0.010384326,
+ 0.01639641,
+ 0.09542911,
+ 0.012538498,
+ -0.03542602,
+ 0.018125113,
+ 0.062750235,
+ 0.0007333235,
+ -0.13612862,
+ -0.049830034,
+ 0.021177148,
+ 0.006589976,
+ 0.007859552,
+ -0.03270378,
+ 0.024738451,
+ -0.02542262,
+ -0.0033008803,
+ 0.030640591,
+ -0.032442387,
+ 0.04598555,
+ 0.03903257,
+ 0.035755396,
+ 0.01686084,
+ 0.13498692,
+ 0.028296864,
+ -0.0035224769,
+ -0.036735818,
+ -0.046355885,
+ 0.057701495,
+ 0.008000554,
+ 0.047822826,
+ 0.04911064,
+ 0.035214324,
+ -0.09817153,
+ 0.0050856513,
+ -0.018094635,
+ -0.04385158,
+ 0.06649695,
+ -0.037648164,
+ -0.006218895,
+ -0.037976924,
+ -0.0036204353,
+ -0.03149386,
+ 0.031777944,
+ -0.011333557,
+ 0.009081317,
+ 0.022486951,
+ 0.032106593,
+ 0.023041077,
+ -0.06739943,
+ 0.06294171,
+ -0.057333894,
+ -0.041295,
+ 0.060841344,
+ 0.03247397,
+ -0.05132725,
+ -0.04992364
],
"index": 0,
"object": "embedding"
diff --git a/tests/integration/recordings/responses/f28a44c97ea7.json b/tests/integration/recordings/responses/f28a44c97ea7.json
index d50851dfd..fd4fb9025 100644
--- a/tests/integration/recordings/responses/f28a44c97ea7.json
+++ b/tests/integration/recordings/responses/f28a44c97ea7.json
@@ -20,14 +20,14 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-282",
+ "id": "chatcmpl-685",
"choices": [
{
"finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "The largest planet in our solar system is Jupiter. It is a gas giant, with a diameter of approximately 142,984 kilometers (88,846 miles). This makes it more than 11 times the diameter of the Earth and more than 2.5 times the mass of all the other planets in our solar system combined.",
+ "content": "The largest planet in our solar system is Jupiter. It is a gas giant and has a diameter of approximately 142,984 kilometers (88,846 miles). Jupiter is more than 1,300 times the size of Earth and is the fifth planet from the Sun.",
"refusal": null,
"role": "assistant",
"annotations": null,
@@ -37,15 +37,15 @@
}
}
],
- "created": 1759012143,
+ "created": 1759437857,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 67,
+ "completion_tokens": 55,
"prompt_tokens": 35,
- "total_tokens": 102,
+ "total_tokens": 90,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/f340a394f6e0.json b/tests/integration/recordings/responses/f340a394f6e0.json
index 50826e3c1..96ea5dab0 100644
--- a/tests/integration/recordings/responses/f340a394f6e0.json
+++ b/tests/integration/recordings/responses/f340a394f6e0.json
@@ -21,7 +21,7 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-494",
+ "id": "chatcmpl-233",
"choices": [
{
"finish_reason": "stop",
@@ -38,7 +38,7 @@
}
}
],
- "created": 1759245126,
+ "created": 1759437799,
"model": "llama-guard3:1b",
"object": "chat.completion",
"service_tier": null,
diff --git a/tests/integration/recordings/responses/f6a1cb47dfe8.json b/tests/integration/recordings/responses/f6a1cb47dfe8.json
new file mode 100644
index 000000000..e5677335b
--- /dev/null
+++ b/tests/integration/recordings/responses/f6a1cb47dfe8.json
@@ -0,0 +1,170 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant."
+ },
+ {
+ "role": "user",
+ "content": "Say hi to the world. Use tools to do so."
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_qvp9u80l",
+ "type": "function",
+ "function": {
+ "name": "greet_everyone",
+ "arguments": "{\"url\":\"world\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_qvp9u80l",
+ "content": [
+ {
+ "type": "text",
+ "text": "Hello, world!"
+ }
+ ]
+ },
+ {
+ "role": "assistant",
+ "content": "<|python_tag|>{\"message\": \"Hello, world!\", \"type\": \"hello_world\"}"
+ },
+ {
+ "role": "user",
+ "content": "What is the boiling point of polyjuice? Use tools to answer."
+ }
+ ],
+ "max_tokens": 0,
+ "stream": true,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "greet_everyone",
+ "parameters": {
+ "properties": {
+ "url": {
+ "title": "Url",
+ "type": "string"
+ }
+ },
+ "required": [
+ "url"
+ ],
+ "title": "greet_everyoneArguments",
+ "type": "object"
+ }
+ }
+ },
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
+ "parameters": {
+ "properties": {
+ "liquid_name": {
+ "title": "Liquid Name",
+ "type": "string"
+ },
+ "celsius": {
+ "default": true,
+ "title": "Celsius",
+ "type": "boolean"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ],
+ "title": "get_boiling_pointArguments",
+ "type": "object"
+ }
+ }
+ }
+ ]
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-827",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": [
+ {
+ "index": 0,
+ "id": "call_y1jmdav5",
+ "function": {
+ "arguments": "{\"celsius\":\"false\",\"liquid_name\":\"polyjuice\"}",
+ "name": "get_boiling_point"
+ },
+ "type": "function"
+ }
+ ]
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437847,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-827",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "tool_calls",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437848,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/f70f30f54211.json b/tests/integration/recordings/responses/f70f30f54211.json
index c4dd90e68..ba0d1d59d 100644
--- a/tests/integration/recordings/responses/f70f30f54211.json
+++ b/tests/integration/recordings/responses/f70f30f54211.json
@@ -38,42 +38,32 @@
"body": {
"__type__": "openai.types.chat.chat_completion.ChatCompletion",
"__data__": {
- "id": "chatcmpl-10",
+ "id": "chatcmpl-755",
"choices": [
{
- "finish_reason": "tool_calls",
+ "finish_reason": "stop",
"index": 0,
"logprobs": null,
"message": {
- "content": "",
+ "content": "{\"name\":\"get_weather\",\"parameters\":{\\>\"city\": \"Tokyo\"}}",
"refusal": null,
"role": "assistant",
"annotations": null,
"audio": null,
"function_call": null,
- "tool_calls": [
- {
- "id": "call_7cm57k1b",
- "function": {
- "arguments": "{\"city\":\"Tokyo\"}",
- "name": "get_weather"
- },
- "type": "function",
- "index": 0
- }
- ]
+ "tool_calls": null
}
}
],
- "created": 1756921368,
+ "created": 1759437886,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
- "completion_tokens": 18,
+ "completion_tokens": 17,
"prompt_tokens": 177,
- "total_tokens": 195,
+ "total_tokens": 194,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
diff --git a/tests/integration/recordings/responses/f8ba05a5ce61.json b/tests/integration/recordings/responses/f8ba05a5ce61.json
new file mode 100644
index 000000000..a09e430bd
--- /dev/null
+++ b/tests/integration/recordings/responses/f8ba05a5ce61.json
@@ -0,0 +1,402 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point_with_metadata\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.137398Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "[",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.179615Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "get",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.221193Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_bo",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.264409Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "iling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.30586Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.347477Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_with",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.389016Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_metadata",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.430288Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "(",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.471941Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "liquid",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.513993Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "_name",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.555492Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "='",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.596851Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.638274Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.680806Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.723172Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "',",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.764626Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " cel",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.806696Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ci",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.848776Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "us",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.891751Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "=True",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.933562Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ")]",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:14.975196Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 1471473500,
+ "load_duration": 104730458,
+ "prompt_eval_count": 368,
+ "prompt_eval_duration": 527632084,
+ "eval_count": 21,
+ "eval_duration": 838372750,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/fced8b60ae5f.json b/tests/integration/recordings/responses/fced8b60ae5f.json
new file mode 100644
index 000000000..9d90b6ca3
--- /dev/null
+++ b/tests/integration/recordings/responses/fced8b60ae5f.json
@@ -0,0 +1,986 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "messages": [
+ {
+ "role": "system",
+ "content": "You are a helpful assistant Always respond with tool calls no matter what. "
+ },
+ {
+ "role": "user",
+ "content": "Get the boiling point of polyjuice with a tool call."
+ },
+ {
+ "role": "assistant",
+ "content": "",
+ "tool_calls": [
+ {
+ "id": "call_6ufbs6q1",
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}"
+ }
+ }
+ ]
+ },
+ {
+ "role": "tool",
+ "tool_call_id": "call_6ufbs6q1",
+ "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'"
+ }
+ ],
+ "max_tokens": 512,
+ "stream": true,
+ "temperature": 0.0001,
+ "tool_choice": "auto",
+ "tools": [
+ {
+ "type": "function",
+ "function": {
+ "name": "get_boiling_point",
+ "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.",
+ "parameters": {
+ "type": "object",
+ "properties": {
+ "liquid_name": {
+ "type": "string",
+ "description": "The name of the liquid"
+ },
+ "celcius": {
+ "type": "boolean",
+ "description": "Whether to return the boiling point in Celcius"
+ }
+ },
+ "required": [
+ "liquid_name"
+ ]
+ }
+ }
+ }
+ ],
+ "top_p": 0.9
+ },
+ "endpoint": "/v1/chat/completions",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "I",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " apologize",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " for",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " error",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": ".",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " Here",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " is",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " the",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " revised",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " tool",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " call",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": ":\n\n",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437819,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "{\"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "name",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "get",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "_bo",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "iling",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "_point",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "\",",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "parameters",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " {\"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "liquid",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "_name",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "\":",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": " \"",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "poly",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "ju",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "ice",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "\"}}",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": null,
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ },
+ {
+ "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
+ "__data__": {
+ "id": "chatcmpl-371",
+ "choices": [
+ {
+ "delta": {
+ "content": "",
+ "function_call": null,
+ "refusal": null,
+ "role": "assistant",
+ "tool_calls": null
+ },
+ "finish_reason": "stop",
+ "index": 0,
+ "logprobs": null
+ }
+ ],
+ "created": 1759437820,
+ "model": "llama3.2:3b-instruct-fp16",
+ "object": "chat.completion.chunk",
+ "service_tier": null,
+ "system_fingerprint": "fp_ollama",
+ "usage": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/feae037e2abd.json b/tests/integration/recordings/responses/feae037e2abd.json
new file mode 100644
index 000000000..732b71b23
--- /dev/null
+++ b/tests/integration/recordings/responses/feae037e2abd.json
@@ -0,0 +1,258 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "http://localhost:11434/api/generate",
+ "headers": {},
+ "body": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "raw": true,
+ "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
+ "options": {
+ "temperature": 0.0001,
+ "top_p": 0.9
+ },
+ "stream": true
+ },
+ "endpoint": "/api/generate",
+ "model": "llama3.2:3b-instruct-fp16"
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.185676Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "The",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.227434Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " boiling",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.268751Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " point",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.310105Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " of",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.351683Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " poly",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.396988Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ju",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.439384Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "ice",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.481075Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " is",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.522627Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": " -",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.564154Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "100",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.605696Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": "\u00b0C",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.647134Z",
+ "done": false,
+ "done_reason": null,
+ "total_duration": null,
+ "load_duration": null,
+ "prompt_eval_count": null,
+ "prompt_eval_duration": null,
+ "eval_count": null,
+ "eval_duration": null,
+ "response": ".",
+ "thinking": null,
+ "context": null
+ }
+ },
+ {
+ "__type__": "ollama._types.GenerateResponse",
+ "__data__": {
+ "model": "llama3.2:3b-instruct-fp16",
+ "created_at": "2025-10-02T02:55:12.688465Z",
+ "done": true,
+ "done_reason": "stop",
+ "total_duration": 646686792,
+ "load_duration": 78333875,
+ "prompt_eval_count": 395,
+ "prompt_eval_duration": 64602125,
+ "eval_count": 13,
+ "eval_duration": 503233541,
+ "response": "",
+ "thinking": null,
+ "context": null
+ }
+ }
+ ],
+ "is_streaming": true
+ }
+}
diff --git a/tests/integration/recordings/responses/models-bd032f995f2a-16718308.json b/tests/integration/recordings/responses/models-bd032f995f2a-16718308.json
new file mode 100644
index 000000000..cf7ed5924
--- /dev/null
+++ b/tests/integration/recordings/responses/models-bd032f995f2a-16718308.json
@@ -0,0 +1,843 @@
+{
+ "request": {
+ "method": "POST",
+ "url": "https://api.openai.com/v1/v1/models",
+ "headers": {},
+ "body": {},
+ "endpoint": "/v1/models",
+ "model": ""
+ },
+ "response": {
+ "body": [
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4-0613",
+ "created": 1686588896,
+ "object": "model",
+ "owned_by": "openai"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4",
+ "created": 1687882411,
+ "object": "model",
+ "owned_by": "openai"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-3.5-turbo",
+ "created": 1677610602,
+ "object": "model",
+ "owned_by": "openai"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "text-embedding-3-small-okan-test",
+ "created": 1759393278,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-realtime",
+ "created": 1756271701,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-realtime-2025-08-28",
+ "created": 1756271773,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-audio",
+ "created": 1756339249,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5-codex",
+ "created": 1757527818,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "davinci-002",
+ "created": 1692634301,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "babbage-002",
+ "created": 1692634615,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-3.5-turbo-instruct",
+ "created": 1692901427,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-3.5-turbo-instruct-0914",
+ "created": 1694122472,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "dall-e-3",
+ "created": 1698785189,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "dall-e-2",
+ "created": 1698798177,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4-1106-preview",
+ "created": 1698957206,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-3.5-turbo-1106",
+ "created": 1698959748,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "tts-1-hd",
+ "created": 1699046015,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "tts-1-1106",
+ "created": 1699053241,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "tts-1-hd-1106",
+ "created": 1699053533,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "text-embedding-3-small",
+ "created": 1705948997,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "text-embedding-3-large",
+ "created": 1705953180,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4-0125-preview",
+ "created": 1706037612,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4-turbo-preview",
+ "created": 1706037777,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-3.5-turbo-0125",
+ "created": 1706048358,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4-turbo",
+ "created": 1712361441,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4-turbo-2024-04-09",
+ "created": 1712601677,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o",
+ "created": 1715367049,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-2024-05-13",
+ "created": 1715368132,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-2024-07-18",
+ "created": 1721172717,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini",
+ "created": 1721172741,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-2024-08-06",
+ "created": 1722814719,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "chatgpt-4o-latest",
+ "created": 1723515131,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o1-mini-2024-09-12",
+ "created": 1725648979,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o1-mini",
+ "created": 1725649008,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-realtime-preview-2024-10-01",
+ "created": 1727131766,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-audio-preview-2024-10-01",
+ "created": 1727389042,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-audio-preview",
+ "created": 1727460443,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-realtime-preview",
+ "created": 1727659998,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "omni-moderation-latest",
+ "created": 1731689265,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "omni-moderation-2024-09-26",
+ "created": 1732734466,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-realtime-preview-2024-12-17",
+ "created": 1733945430,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-audio-preview-2024-12-17",
+ "created": 1734034239,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-realtime-preview-2024-12-17",
+ "created": 1734112601,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-audio-preview-2024-12-17",
+ "created": 1734115920,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o1-2024-12-17",
+ "created": 1734326976,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o1",
+ "created": 1734375816,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-realtime-preview",
+ "created": 1734387380,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-audio-preview",
+ "created": 1734387424,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3-mini",
+ "created": 1737146383,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3-mini-2025-01-31",
+ "created": 1738010200,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-2024-11-20",
+ "created": 1739331543,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-search-preview-2025-03-11",
+ "created": 1741388170,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-search-preview",
+ "created": 1741388720,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-search-preview-2025-03-11",
+ "created": 1741390858,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-search-preview",
+ "created": 1741391161,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-transcribe",
+ "created": 1742068463,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-transcribe",
+ "created": 1742068596,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o1-pro-2025-03-19",
+ "created": 1742251504,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o1-pro",
+ "created": 1742251791,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-mini-tts",
+ "created": 1742403959,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3-2025-04-16",
+ "created": 1744133301,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o4-mini-2025-04-16",
+ "created": 1744133506,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3",
+ "created": 1744225308,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o4-mini",
+ "created": 1744225351,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4.1-2025-04-14",
+ "created": 1744315746,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4.1",
+ "created": 1744316542,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4.1-mini-2025-04-14",
+ "created": 1744317547,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4.1-mini",
+ "created": 1744318173,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4.1-nano-2025-04-14",
+ "created": 1744321025,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4.1-nano",
+ "created": 1744321707,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-image-1",
+ "created": 1745517030,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "codex-mini-latest",
+ "created": 1746673257,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3-pro",
+ "created": 1748475349,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-realtime-preview-2025-06-03",
+ "created": 1748907838,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-4o-audio-preview-2025-06-03",
+ "created": 1748908498,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3-pro-2025-06-10",
+ "created": 1749166761,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o4-mini-deep-research",
+ "created": 1749685485,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3-deep-research",
+ "created": 1749840121,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o3-deep-research-2025-06-26",
+ "created": 1750865219,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "o4-mini-deep-research-2025-06-26",
+ "created": 1750866121,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5-chat-latest",
+ "created": 1754073306,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5-2025-08-07",
+ "created": 1754075360,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5",
+ "created": 1754425777,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5-mini-2025-08-07",
+ "created": 1754425867,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5-mini",
+ "created": 1754425928,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5-nano-2025-08-07",
+ "created": 1754426303,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-5-nano",
+ "created": 1754426384,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-audio-2025-08-28",
+ "created": 1756256146,
+ "object": "model",
+ "owned_by": "system"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "gpt-3.5-turbo-16k",
+ "created": 1683758102,
+ "object": "model",
+ "owned_by": "openai-internal"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "tts-1",
+ "created": 1681940951,
+ "object": "model",
+ "owned_by": "openai-internal"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "whisper-1",
+ "created": 1677532384,
+ "object": "model",
+ "owned_by": "openai-internal"
+ }
+ },
+ {
+ "__type__": "openai.types.model.Model",
+ "__data__": {
+ "id": "text-embedding-ada-002",
+ "created": 1671217299,
+ "object": "model",
+ "owned_by": "openai-internal"
+ }
+ }
+ ],
+ "is_streaming": false
+ }
+}
diff --git a/tests/integration/tool_runtime/test_builtin_tools.py b/tests/integration/tool_runtime/test_builtin_tools.py
index 1acf06719..97300a8dd 100644
--- a/tests/integration/tool_runtime/test_builtin_tools.py
+++ b/tests/integration/tool_runtime/test_builtin_tools.py
@@ -26,7 +26,7 @@ def test_web_search_tool(llama_stack_client, sample_search_query):
pytest.skip("TAVILY_SEARCH_API_KEY not set, skipping test")
tools = llama_stack_client.tool_runtime.list_tools()
- assert any(tool.identifier == "web_search" for tool in tools)
+ assert any(tool.name == "web_search" for tool in tools)
response = llama_stack_client.tool_runtime.invoke_tool(
tool_name="web_search", kwargs={"query": sample_search_query}
@@ -52,7 +52,7 @@ def test_wolfram_alpha_tool(llama_stack_client, sample_wolfram_alpha_query):
pytest.skip("WOLFRAM_ALPHA_API_KEY not set, skipping test")
tools = llama_stack_client.tool_runtime.list_tools()
- assert any(tool.identifier == "wolfram_alpha" for tool in tools)
+ assert any(tool.name == "wolfram_alpha" for tool in tools)
response = llama_stack_client.tool_runtime.invoke_tool(
tool_name="wolfram_alpha", kwargs={"query": sample_wolfram_alpha_query}
)
diff --git a/tests/integration/tool_runtime/test_mcp.py b/tests/integration/tool_runtime/test_mcp.py
index 831186b15..9e22d3e58 100644
--- a/tests/integration/tool_runtime/test_mcp.py
+++ b/tests/integration/tool_runtime/test_mcp.py
@@ -54,14 +54,14 @@ def test_mcp_invocation(llama_stack_client, text_model_id, mcp_server):
}
with pytest.raises(Exception, match="Unauthorized"):
- llama_stack_client.tools.list()
+ llama_stack_client.tools.list(toolgroup_id=test_toolgroup_id)
response = llama_stack_client.tools.list(
toolgroup_id=test_toolgroup_id,
extra_headers=auth_headers,
)
assert len(response) == 2
- assert {t.identifier for t in response} == {"greet_everyone", "get_boiling_point"}
+ assert {t.name for t in response} == {"greet_everyone", "get_boiling_point"}
response = llama_stack_client.tool_runtime.invoke_tool(
tool_name="greet_everyone",
diff --git a/tests/integration/tool_runtime/test_mcp_json_schema.py b/tests/integration/tool_runtime/test_mcp_json_schema.py
new file mode 100644
index 000000000..47e9ee029
--- /dev/null
+++ b/tests/integration/tool_runtime/test_mcp_json_schema.py
@@ -0,0 +1,404 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""
+Integration tests for MCP tools with complex JSON Schema support.
+Tests $ref, $defs, and other JSON Schema features through MCP integration.
+"""
+
+import json
+
+import pytest
+
+from llama_stack import LlamaStackAsLibraryClient
+from tests.common.mcp import make_mcp_server
+
+AUTH_TOKEN = "test-token"
+
+
+@pytest.fixture(scope="function")
+def mcp_server_with_complex_schemas():
+ """MCP server with tools that have complex schemas including $ref and $defs."""
+ from mcp.server.fastmcp import Context
+
+ async def book_flight(flight: dict, passengers: list[dict], payment: dict, ctx: Context) -> dict:
+ """
+ Book a flight with passenger and payment information.
+
+ This tool uses JSON Schema $ref and $defs for type reuse.
+ """
+ return {
+ "booking_id": "BK12345",
+ "flight": flight,
+ "passengers": passengers,
+ "payment": payment,
+ "status": "confirmed",
+ }
+
+ async def process_order(order_data: dict, ctx: Context) -> dict:
+ """
+ Process an order with nested address information.
+
+ Uses nested objects and $ref.
+ """
+ return {"order_id": "ORD789", "status": "processing", "data": order_data}
+
+ async def flexible_contact(contact_info: str, ctx: Context) -> dict:
+ """
+ Accept flexible contact (email or phone).
+
+ Uses an anyOf schema.
+ """
+ if "@" in contact_info:
+ return {"type": "email", "value": contact_info}
+ else:
+ return {"type": "phone", "value": contact_info}
+
+ # Manually attach complex schemas to the functions
+ # (FastMCP might not support this by default, so this is test setup)
+
+ # For MCP, we need to set the schema via tool annotations
+ # This is test infrastructure to force specific schemas
+
+ tools = {"book_flight": book_flight, "process_order": process_order, "flexible_contact": flexible_contact}
+
+ # Note: In a real MCP implementation, we'd configure these schemas properly
+ # For testing, we may need to mock or extend the MCP server setup
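+ # As a rough illustration (hypothetical schema, not something FastMCP derives on its own),
+ # a $ref/$defs input schema for book_flight could look like:
+ #   {
+ #     "type": "object",
+ #     "$defs": {"Passenger": {"type": "object", "properties": {"name": {"type": "string"}}}},
+ #     "properties": {"passengers": {"type": "array", "items": {"$ref": "#/$defs/Passenger"}}}
+ #   }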
+
+ with make_mcp_server(required_auth_token=AUTH_TOKEN, tools=tools) as server_info:
+ yield server_info
+
+
+@pytest.fixture(scope="function")
+def mcp_server_with_output_schemas():
+ """MCP server with tools that have output schemas defined."""
+ from mcp.server.fastmcp import Context
+
+ async def get_weather(location: str, ctx: Context) -> dict:
+ """
+ Get weather with structured output.
+
+ Has both input and output schemas.
+ """
+ return {"temperature": 72.5, "conditions": "Sunny", "humidity": 45, "wind_speed": 10.2}
+
+ async def calculate(x: float, y: float, operation: str, ctx: Context) -> dict:
+ """
+ Perform calculation with validated output.
+ """
+ operations = {"add": x + y, "subtract": x - y, "multiply": x * y, "divide": x / y if y != 0 else None}
+ result = operations.get(operation)
+ return {"result": result, "operation": operation}
+
+ tools = {"get_weather": get_weather, "calculate": calculate}
+
+ with make_mcp_server(required_auth_token=AUTH_TOKEN, tools=tools) as server_info:
+ yield server_info
+
+
+class TestMCPSchemaPreservation:
+ """Test that MCP tool schemas are preserved correctly."""
+
+ def test_mcp_tools_list_with_schemas(self, llama_stack_client, mcp_server_with_complex_schemas):
+ """Test listing MCP tools preserves input_schema."""
+ if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+ pytest.skip("Library client required for local MCP server")
+
+ test_toolgroup_id = "mcp::complex_list"
+ uri = mcp_server_with_complex_schemas["server_url"]
+
+ # Clean up any existing registration
+ try:
+ llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
+ except Exception:
+ pass
+
+ # Register MCP toolgroup
+ llama_stack_client.toolgroups.register(
+ toolgroup_id=test_toolgroup_id,
+ provider_id="model-context-protocol",
+ mcp_endpoint=dict(uri=uri),
+ )
+
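+ # The MCP bearer token is passed per request: provider_data below is JSON-encoded
+ # into the X-LlamaStack-Provider-Data header on each client call.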
+ provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}}
+ auth_headers = {
+ "X-LlamaStack-Provider-Data": json.dumps(provider_data),
+ }
+
+ # List runtime tools
+ response = llama_stack_client.tool_runtime.list_tools(
+ tool_group_id=test_toolgroup_id,
+ extra_headers=auth_headers,
+ )
+
+ tools = response
+ assert len(tools) > 0
+
+ # Check each tool has input_schema
+ for tool in tools:
+ assert hasattr(tool, "input_schema")
+ # Schema might be None or a dict, depending on the tool
+ if tool.input_schema is not None:
+ assert isinstance(tool.input_schema, dict)
+ # Should have basic JSON Schema structure
+ if "properties" in tool.input_schema:
+ assert "type" in tool.input_schema
+
+ def test_mcp_schema_with_refs_preserved(self, llama_stack_client, mcp_server_with_complex_schemas):
+ """Test that $ref and $defs in MCP schemas are preserved."""
+ if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+ pytest.skip("Library client required for local MCP server")
+
+ test_toolgroup_id = "mcp::complex_refs"
+ uri = mcp_server_with_complex_schemas["server_url"]
+
+ # Register
+ try:
+ llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
+ except Exception:
+ pass
+
+ llama_stack_client.toolgroups.register(
+ toolgroup_id=test_toolgroup_id,
+ provider_id="model-context-protocol",
+ mcp_endpoint=dict(uri=uri),
+ )
+ provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}}
+ auth_headers = {
+ "X-LlamaStack-Provider-Data": json.dumps(provider_data),
+ }
+
+ # List tools
+ response = llama_stack_client.tool_runtime.list_tools(
+ tool_group_id=test_toolgroup_id,
+ extra_headers=auth_headers,
+ )
+
+ # Find book_flight tool (which should have $ref/$defs)
+ book_flight_tool = next((t for t in response if t.name == "book_flight"), None)
+
+ if book_flight_tool and book_flight_tool.input_schema:
+ # If the MCP server provides $defs, they should be preserved
+ # This is the KEY test for the bug fix
+ schema = book_flight_tool.input_schema
+
+ # Check if schema has properties (might vary based on MCP implementation)
+ if "properties" in schema:
+ # Verify schema structure is preserved (exact structure depends on MCP server)
+ assert isinstance(schema["properties"], dict)
+
+ # If $defs are present, verify they're preserved
+ if "$defs" in schema:
+ assert isinstance(schema["$defs"], dict)
+ # Each definition should be a dict
+ for _def_name, def_schema in schema["$defs"].items():
+ assert isinstance(def_schema, dict)
+
+ def test_mcp_output_schema_preserved(self, llama_stack_client, mcp_server_with_output_schemas):
+ """Test that MCP outputSchema is preserved."""
+ if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+ pytest.skip("Library client required for local MCP server")
+
+ test_toolgroup_id = "mcp::with_output"
+ uri = mcp_server_with_output_schemas["server_url"]
+
+ try:
+ llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
+ except Exception:
+ pass
+
+ llama_stack_client.toolgroups.register(
+ toolgroup_id=test_toolgroup_id,
+ provider_id="model-context-protocol",
+ mcp_endpoint=dict(uri=uri),
+ )
+
+ provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}}
+ auth_headers = {
+ "X-LlamaStack-Provider-Data": json.dumps(provider_data),
+ }
+
+ response = llama_stack_client.tool_runtime.list_tools(
+ tool_group_id=test_toolgroup_id,
+ extra_headers=auth_headers,
+ )
+
+ # Find get_weather tool
+ weather_tool = next((t for t in response if t.name == "get_weather"), None)
+
+ if weather_tool:
+ # Check if output_schema field exists and is preserved
+ assert hasattr(weather_tool, "output_schema")
+
+ # If MCP server provides output schema, it should be preserved
+ if weather_tool.output_schema is not None:
+ assert isinstance(weather_tool.output_schema, dict)
+ # Should have JSON Schema structure
+ if "properties" in weather_tool.output_schema:
+ assert "type" in weather_tool.output_schema
+
+
+class TestMCPToolInvocation:
+ """Test invoking MCP tools with complex schemas."""
+
+ def test_invoke_mcp_tool_with_nested_data(self, llama_stack_client, mcp_server_with_complex_schemas):
+ """Test invoking MCP tool that expects nested object structure."""
+ if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+ pytest.skip("Library client required for local MCP server")
+
+ test_toolgroup_id = "mcp::complex_invoke_nested"
+ uri = mcp_server_with_complex_schemas["server_url"]
+
+ try:
+ llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
+ except Exception:
+ pass
+
+ llama_stack_client.toolgroups.register(
+ toolgroup_id=test_toolgroup_id,
+ provider_id="model-context-protocol",
+ mcp_endpoint=dict(uri=uri),
+ )
+
+ provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}}
+ auth_headers = {
+ "X-LlamaStack-Provider-Data": json.dumps(provider_data),
+ }
+
+ # List tools to populate the tool index
+ llama_stack_client.tool_runtime.list_tools(
+ tool_group_id=test_toolgroup_id,
+ extra_headers=auth_headers,
+ )
+
+ # Invoke tool with complex nested data
+ result = llama_stack_client.tool_runtime.invoke_tool(
+ tool_name="process_order",
+ kwargs={
+ "order_data": {
+ "items": [{"name": "Widget", "quantity": 2}, {"name": "Gadget", "quantity": 1}],
+ "shipping": {"address": {"street": "123 Main St", "city": "San Francisco", "zipcode": "94102"}},
+ }
+ },
+ extra_headers=auth_headers,
+ )
+
+ # Should succeed without schema validation errors
+ assert result.content is not None
+ assert result.error_message is None
+
+ def test_invoke_with_flexible_schema(self, llama_stack_client, mcp_server_with_complex_schemas):
+ """Test invoking tool with anyOf schema (flexible input)."""
+ if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+ pytest.skip("Library client required for local MCP server")
+
+ test_toolgroup_id = "mcp::complex_invoke_flexible"
+ uri = mcp_server_with_complex_schemas["server_url"]
+
+ try:
+ llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
+ except Exception:
+ pass
+
+ llama_stack_client.toolgroups.register(
+ toolgroup_id=test_toolgroup_id,
+ provider_id="model-context-protocol",
+ mcp_endpoint=dict(uri=uri),
+ )
+
+ provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}}
+ auth_headers = {
+ "X-LlamaStack-Provider-Data": json.dumps(provider_data),
+ }
+
+ # List tools to populate the tool index
+ llama_stack_client.tool_runtime.list_tools(
+ tool_group_id=test_toolgroup_id,
+ extra_headers=auth_headers,
+ )
+
+ # Test with email format
+ result_email = llama_stack_client.tool_runtime.invoke_tool(
+ tool_name="flexible_contact",
+ kwargs={"contact_info": "user@example.com"},
+ extra_headers=auth_headers,
+ )
+
+ assert result_email.error_message is None
+
+ # Test with phone format
+ result_phone = llama_stack_client.tool_runtime.invoke_tool(
+ tool_name="flexible_contact",
+ kwargs={"contact_info": "+15551234567"},
+ extra_headers=auth_headers,
+ )
+
+ assert result_phone.error_message is None
+
+
+class TestAgentWithMCPTools:
+ """Test agents using MCP tools with complex schemas."""
+
+ @pytest.mark.skip(reason="we need tool call recording for this test since session_id is injected")
+ def test_agent_with_complex_mcp_tool(self, llama_stack_client, text_model_id, mcp_server_with_complex_schemas):
+ """Test agent can use MCP tools with $ref/$defs schemas."""
+ if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
+ pytest.skip("Library client required for local MCP server")
+
+ from llama_stack_client import Agent
+
+ test_toolgroup_id = "mcp::complex_agent"
+ uri = mcp_server_with_complex_schemas["server_url"]
+
+ try:
+ llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id)
+ except Exception:
+ pass
+
+ llama_stack_client.toolgroups.register(
+ toolgroup_id=test_toolgroup_id,
+ provider_id="model-context-protocol",
+ mcp_endpoint=dict(uri=uri),
+ )
+
+ provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}}
+ auth_headers = {
+ "X-LlamaStack-Provider-Data": json.dumps(provider_data),
+ }
+
+ # Create agent with MCP tools
+ agent = Agent(
+ client=llama_stack_client,
+ model=text_model_id,
+ instructions="You are a helpful assistant that can process orders and book flights.",
+ tools=[test_toolgroup_id],
+ extra_headers=auth_headers,
+ )
+
+ session_id = agent.create_session("test-session-complex")
+
+ # Ask agent to use a tool with complex schema
+ response = agent.create_turn(
+ session_id=session_id,
+ messages=[
+ {"role": "user", "content": "Process an order with 2 widgets going to 123 Main St, San Francisco"}
+ ],
+ stream=False,
+ extra_headers=auth_headers,
+ )
+
+ steps = response.steps
+
+ # Verify agent was able to call the tool
+ # (The LLM should have been able to understand the schema and formulate a valid call)
+ tool_execution_steps = [s for s in steps if s.step_type == "tool_execution"]
+
+ # Agent might or might not call the tool depending on the model
+ # But if it does, there should be no errors
+ for step in tool_execution_steps:
+ if step.tool_responses:
+ for tool_response in step.tool_responses:
+ assert tool_response.content is not None
diff --git a/tests/unit/distribution/routers/test_routing_tables.py b/tests/unit/distribution/routers/test_routing_tables.py
index 456a5d041..54a9dd72e 100644
--- a/tests/unit/distribution/routers/test_routing_tables.py
+++ b/tests/unit/distribution/routers/test_routing_tables.py
@@ -16,7 +16,7 @@ from llama_stack.apis.datasets.datasets import Dataset, DatasetPurpose, URIDataS
from llama_stack.apis.datatypes import Api
from llama_stack.apis.models import Model, ModelType
from llama_stack.apis.shields.shields import Shield
-from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroup, ToolParameter
+from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroup
from llama_stack.apis.vector_dbs import VectorDB
from llama_stack.core.datatypes import RegistryEntrySource
from llama_stack.core.routing_tables.benchmarks import BenchmarksRoutingTable
@@ -137,7 +137,10 @@ class ToolGroupsImpl(Impl):
ToolDef(
name="test-tool",
description="Test tool",
- parameters=[ToolParameter(name="test-param", description="Test param", parameter_type="string")],
+ input_schema={
+ "type": "object",
+ "properties": {"test-param": {"type": "string", "description": "Test param"}},
+ },
)
]
)
diff --git a/tests/unit/models/test_prompt_adapter.py b/tests/unit/models/test_prompt_adapter.py
index 0362eb5dd..d31426135 100644
--- a/tests/unit/models/test_prompt_adapter.py
+++ b/tests/unit/models/test_prompt_adapter.py
@@ -18,7 +18,6 @@ from llama_stack.apis.inference import (
from llama_stack.models.llama.datatypes import (
BuiltinTool,
ToolDefinition,
- ToolParamDefinition,
ToolPromptFormat,
)
from llama_stack.providers.utils.inference.prompt_adapter import (
@@ -75,12 +74,15 @@ async def test_system_custom_only():
ToolDefinition(
tool_name="custom1",
description="custom1 tool",
- parameters={
- "param1": ToolParamDefinition(
- param_type="str",
- description="param1 description",
- required=True,
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "param1": {
+ "type": "str",
+ "description": "param1 description",
+ },
+ },
+ "required": ["param1"],
},
)
],
@@ -107,12 +109,15 @@ async def test_system_custom_and_builtin():
ToolDefinition(
tool_name="custom1",
description="custom1 tool",
- parameters={
- "param1": ToolParamDefinition(
- param_type="str",
- description="param1 description",
- required=True,
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "param1": {
+ "type": "str",
+ "description": "param1 description",
+ },
+ },
+ "required": ["param1"],
},
),
],
@@ -138,7 +143,7 @@ async def test_completion_message_encoding():
tool_calls=[
ToolCall(
tool_name="custom1",
- arguments={"param1": "value1"},
+ arguments='{"param1": "value1"}', # arguments must be a JSON string
call_id="123",
)
],
@@ -148,12 +153,15 @@ async def test_completion_message_encoding():
ToolDefinition(
tool_name="custom1",
description="custom1 tool",
- parameters={
- "param1": ToolParamDefinition(
- param_type="str",
- description="param1 description",
- required=True,
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "param1": {
+ "type": "str",
+ "description": "param1 description",
+ },
+ },
+ "required": ["param1"],
},
),
],
@@ -227,12 +235,15 @@ async def test_replace_system_message_behavior_custom_tools():
ToolDefinition(
tool_name="custom1",
description="custom1 tool",
- parameters={
- "param1": ToolParamDefinition(
- param_type="str",
- description="param1 description",
- required=True,
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "param1": {
+ "type": "str",
+ "description": "param1 description",
+ },
+ },
+ "required": ["param1"],
},
),
],
@@ -264,12 +275,15 @@ async def test_replace_system_message_behavior_custom_tools_with_template():
ToolDefinition(
tool_name="custom1",
description="custom1 tool",
- parameters={
- "param1": ToolParamDefinition(
- param_type="str",
- description="param1 description",
- required=True,
- ),
+ input_schema={
+ "type": "object",
+ "properties": {
+ "param1": {
+ "type": "str",
+ "description": "param1 description",
+ },
+ },
+ "required": ["param1"],
},
),
],
diff --git a/tests/unit/providers/agent/test_meta_reference_agent.py b/tests/unit/providers/agent/test_meta_reference_agent.py
index 07e5aa79d..fdbb2b8e9 100644
--- a/tests/unit/providers/agent/test_meta_reference_agent.py
+++ b/tests/unit/providers/agent/test_meta_reference_agent.py
@@ -16,9 +16,8 @@ from llama_stack.apis.agents import (
)
from llama_stack.apis.common.responses import PaginatedResponse
from llama_stack.apis.inference import Inference
-from llama_stack.apis.resource import ResourceType
from llama_stack.apis.safety import Safety
-from llama_stack.apis.tools import ListToolsResponse, Tool, ToolGroups, ToolParameter, ToolRuntime
+from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroups, ToolRuntime
from llama_stack.apis.vector_io import VectorIO
from llama_stack.providers.inline.agents.meta_reference.agent_instance import ChatAgent
from llama_stack.providers.inline.agents.meta_reference.agents import MetaReferenceAgentsImpl
@@ -232,32 +231,26 @@ async def test_delete_agent(agents_impl, sample_agent_config):
async def test__initialize_tools(agents_impl, sample_agent_config):
# Mock tool_groups_api.list_tools()
- agents_impl.tool_groups_api.list_tools.return_value = ListToolsResponse(
+ agents_impl.tool_groups_api.list_tools.return_value = ListToolDefsResponse(
data=[
- Tool(
- identifier="story_maker",
- provider_id="model-context-protocol",
- type=ResourceType.tool,
+ ToolDef(
+ name="story_maker",
toolgroup_id="mcp::my_mcp_server",
description="Make a story",
- parameters=[
- ToolParameter(
- name="story_title",
- parameter_type="string",
- description="Title of the story",
- required=True,
- title="Story Title",
- ),
- ToolParameter(
- name="input_words",
- parameter_type="array",
- description="Input words",
- required=False,
- items={"type": "string"},
- title="Input Words",
- default=[],
- ),
- ],
+ input_schema={
+ "type": "object",
+ "properties": {
+ "story_title": {"type": "string", "description": "Title of the story", "title": "Story Title"},
+ "input_words": {
+ "type": "array",
+ "description": "Input words",
+ "items": {"type": "string"},
+ "title": "Input Words",
+ "default": [],
+ },
+ },
+ "required": ["story_title"],
+ },
)
]
)
@@ -284,27 +277,27 @@ async def test__initialize_tools(agents_impl, sample_agent_config):
assert second_tool.tool_name == "story_maker"
assert second_tool.description == "Make a story"
- parameters = second_tool.parameters
- assert len(parameters) == 2
+ # Verify the input schema
+ input_schema = second_tool.input_schema
+ assert input_schema is not None
+ assert input_schema["type"] == "object"
+
+ properties = input_schema["properties"]
+ assert len(properties) == 2
# Verify a string property
- story_title = parameters.get("story_title")
- assert story_title is not None
- assert story_title.param_type == "string"
- assert story_title.description == "Title of the story"
- assert story_title.required
- assert story_title.items is None
- assert story_title.title == "Story Title"
- assert story_title.default is None
+ story_title = properties["story_title"]
+ assert story_title["type"] == "string"
+ assert story_title["description"] == "Title of the story"
+ assert story_title["title"] == "Story Title"
# Verify an array property
- input_words = parameters.get("input_words")
- assert input_words is not None
- assert input_words.param_type == "array"
- assert input_words.description == "Input words"
- assert not input_words.required
- assert input_words.items is not None
- assert len(input_words.items) == 1
- assert input_words.items.get("type") == "string"
- assert input_words.title == "Input Words"
- assert input_words.default == []
+ input_words = properties["input_words"]
+ assert input_words["type"] == "array"
+ assert input_words["description"] == "Input words"
+ assert input_words["items"]["type"] == "string"
+ assert input_words["title"] == "Input Words"
+ assert input_words["default"] == []
+
+ # Verify required fields
+ assert input_schema["required"] == ["story_title"]
diff --git a/tests/unit/providers/agents/meta_reference/test_openai_responses.py b/tests/unit/providers/agents/meta_reference/test_openai_responses.py
index 5ddc1bda8..0b2e6ab82 100644
--- a/tests/unit/providers/agents/meta_reference/test_openai_responses.py
+++ b/tests/unit/providers/agents/meta_reference/test_openai_responses.py
@@ -39,7 +39,7 @@ from llama_stack.apis.inference import (
OpenAIResponseFormatJSONSchema,
OpenAIUserMessageParam,
)
-from llama_stack.apis.tools.tools import Tool, ToolGroups, ToolInvocationResult, ToolParameter, ToolRuntime
+from llama_stack.apis.tools.tools import ToolDef, ToolGroups, ToolInvocationResult, ToolRuntime
from llama_stack.core.access_control.access_control import default_policy
from llama_stack.core.datatypes import ResponsesStoreConfig
from llama_stack.providers.inline.agents.meta_reference.responses.openai_responses import (
@@ -186,14 +186,15 @@ async def test_create_openai_response_with_string_input_with_tools(openai_respon
input_text = "What is the capital of Ireland?"
model = "meta-llama/Llama-3.1-8B-Instruct"
- openai_responses_impl.tool_groups_api.get_tool.return_value = Tool(
- identifier="web_search",
- provider_id="client",
+ openai_responses_impl.tool_groups_api.get_tool.return_value = ToolDef(
+ name="web_search",
toolgroup_id="web_search",
description="Search the web for information",
- parameters=[
- ToolParameter(name="query", parameter_type="string", description="The query to search for", required=True)
- ],
+ input_schema={
+ "type": "object",
+ "properties": {"query": {"type": "string", "description": "The query to search for"}},
+ "required": ["query"],
+ },
)
openai_responses_impl.tool_runtime_api.invoke_tool.return_value = ToolInvocationResult(
diff --git a/tests/unit/providers/inference/test_remote_vllm.py b/tests/unit/providers/inference/test_remote_vllm.py
index 4dc2e0c16..bb560d378 100644
--- a/tests/unit/providers/inference/test_remote_vllm.py
+++ b/tests/unit/providers/inference/test_remote_vllm.py
@@ -138,8 +138,7 @@ async def test_tool_call_response(vllm_inference_adapter):
ToolCall(
call_id="foo",
tool_name="knowledge_search",
- arguments={"query": "How many?"},
- arguments_json='{"query": "How many?"}',
+ arguments='{"query": "How many?"}',
)
],
),
@@ -263,7 +262,7 @@ async def test_tool_call_delta_streaming_arguments_dict():
assert chunks[1].event.event_type.value == "progress"
assert chunks[1].event.delta.type == "tool_call"
assert chunks[1].event.delta.parse_status.value == "succeeded"
- assert chunks[1].event.delta.tool_call.arguments_json == '{"number": 28, "power": 3}'
+ assert chunks[1].event.delta.tool_call.arguments == '{"number": 28, "power": 3}'
assert chunks[2].event.event_type.value == "complete"
@@ -339,11 +338,11 @@ async def test_multiple_tool_calls():
assert chunks[1].event.event_type.value == "progress"
assert chunks[1].event.delta.type == "tool_call"
assert chunks[1].event.delta.parse_status.value == "succeeded"
- assert chunks[1].event.delta.tool_call.arguments_json == '{"number": 28, "power": 3}'
+ assert chunks[1].event.delta.tool_call.arguments == '{"number": 28, "power": 3}'
assert chunks[2].event.event_type.value == "progress"
assert chunks[2].event.delta.type == "tool_call"
assert chunks[2].event.delta.parse_status.value == "succeeded"
- assert chunks[2].event.delta.tool_call.arguments_json == '{"first_number": 4, "second_number": 7}'
+ assert chunks[2].event.delta.tool_call.arguments == '{"first_number": 4, "second_number": 7}'
assert chunks[3].event.event_type.value == "complete"
@@ -456,7 +455,7 @@ async def test_process_vllm_chat_completion_stream_response_tool_call_args_last_
assert chunks[-1].event.event_type == ChatCompletionResponseEventType.complete
assert chunks[-2].event.delta.type == "tool_call"
assert chunks[-2].event.delta.tool_call.tool_name == mock_tool_name
- assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments
+ assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments_str
async def test_process_vllm_chat_completion_stream_response_no_finish_reason():
@@ -468,7 +467,7 @@ async def test_process_vllm_chat_completion_stream_response_no_finish_reason():
mock_tool_name = "mock_tool"
mock_tool_arguments = {"arg1": 0, "arg2": 100}
- mock_tool_arguments_str = '"{\\"arg1\\": 0, \\"arg2\\": 100}"'
+ mock_tool_arguments_str = json.dumps(mock_tool_arguments)
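+ # tool-call arguments now stream through as a plain JSON string, so the expected
+ # value is json.dumps(...) rather than the old double-encoded literal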
async def mock_stream():
mock_chunks = [
@@ -508,7 +507,7 @@ async def test_process_vllm_chat_completion_stream_response_no_finish_reason():
assert chunks[-1].event.event_type == ChatCompletionResponseEventType.complete
assert chunks[-2].event.delta.type == "tool_call"
assert chunks[-2].event.delta.tool_call.tool_name == mock_tool_name
- assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments
+ assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments_str
async def test_process_vllm_chat_completion_stream_response_tool_without_args():
@@ -556,7 +555,7 @@ async def test_process_vllm_chat_completion_stream_response_tool_without_args():
assert chunks[-1].event.event_type == ChatCompletionResponseEventType.complete
assert chunks[-2].event.delta.type == "tool_call"
assert chunks[-2].event.delta.tool_call.tool_name == mock_tool_name
- assert chunks[-2].event.delta.tool_call.arguments == {}
+ assert chunks[-2].event.delta.tool_call.arguments == "{}"
async def test_health_status_success(vllm_inference_adapter):
diff --git a/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py b/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py
index 6fda2b508..4b706717d 100644
--- a/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py
+++ b/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py
@@ -4,7 +4,7 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-from llama_stack.apis.tools import ToolDef, ToolParameter
+from llama_stack.apis.tools import ToolDef
from llama_stack.providers.inline.agents.meta_reference.responses.streaming import (
convert_tooldef_to_chat_tool,
)
@@ -20,15 +20,11 @@ def test_convert_tooldef_to_chat_tool_preserves_items_field():
tool_def = ToolDef(
name="test_tool",
description="A test tool with array parameter",
- parameters=[
- ToolParameter(
- name="tags",
- parameter_type="array",
- description="List of tags",
- required=True,
- items={"type": "string"},
- )
- ],
+ input_schema={
+ "type": "object",
+ "properties": {"tags": {"type": "array", "description": "List of tags", "items": {"type": "string"}}},
+ "required": ["tags"],
+ },
)
result = convert_tooldef_to_chat_tool(tool_def)
diff --git a/tests/unit/providers/utils/inference/test_openai_compat.py b/tests/unit/providers/utils/inference/test_openai_compat.py
index ddc70e102..c200c4395 100644
--- a/tests/unit/providers/utils/inference/test_openai_compat.py
+++ b/tests/unit/providers/utils/inference/test_openai_compat.py
@@ -41,9 +41,7 @@ async def test_convert_message_to_openai_dict():
async def test_convert_message_to_openai_dict_with_tool_call():
message = CompletionMessage(
content="",
- tool_calls=[
- ToolCall(call_id="123", tool_name="test_tool", arguments_json='{"foo": "bar"}', arguments={"foo": "bar"})
- ],
+ tool_calls=[ToolCall(call_id="123", tool_name="test_tool", arguments='{"foo": "bar"}')],
stop_reason=StopReason.end_of_turn,
)
@@ -65,8 +63,7 @@ async def test_convert_message_to_openai_dict_with_builtin_tool_call():
ToolCall(
call_id="123",
tool_name=BuiltinTool.brave_search,
- arguments_json='{"foo": "bar"}',
- arguments={"foo": "bar"},
+ arguments='{"foo": "bar"}',
)
],
stop_reason=StopReason.end_of_turn,
@@ -202,8 +199,7 @@ async def test_convert_message_to_openai_dict_new_completion_message_with_tool_c
ToolCall(
call_id="call_123",
tool_name="get_weather",
- arguments={"city": "Sligo"},
- arguments_json='{"city": "Sligo"}',
+ arguments='{"city": "Sligo"}',
)
],
stop_reason=StopReason.end_of_turn,
diff --git a/tests/unit/providers/utils/test_openai_compat_conversion.py b/tests/unit/providers/utils/test_openai_compat_conversion.py
new file mode 100644
index 000000000..2681068f1
--- /dev/null
+++ b/tests/unit/providers/utils/test_openai_compat_conversion.py
@@ -0,0 +1,381 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""
+Unit tests for OpenAI compatibility tool conversion.
+Tests convert_tooldef_to_openai_tool with new JSON Schema approach.
+"""
+
+from llama_stack.models.llama.datatypes import BuiltinTool, ToolDefinition
+from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool
+
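+# Target OpenAI function-calling shape produced by the conversion:
+#   {"type": "function", "function": {"name": ..., "description": ..., "parameters": <JSON Schema>}}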
+
+class TestSimpleSchemaConversion:
+ """Test basic schema conversions to OpenAI format."""
+
+ def test_simple_tool_conversion(self):
+ """Test conversion of simple tool with basic input schema."""
+ tool = ToolDefinition(
+ tool_name="get_weather",
+ description="Get weather information",
+ input_schema={
+ "type": "object",
+ "properties": {"location": {"type": "string", "description": "City name"}},
+ "required": ["location"],
+ },
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ # Check OpenAI structure
+ assert result["type"] == "function"
+ assert "function" in result
+
+ function = result["function"]
+ assert function["name"] == "get_weather"
+ assert function["description"] == "Get weather information"
+
+ # Check parameters are passed through
+ assert "parameters" in function
+ assert function["parameters"] == tool.input_schema
+ assert function["parameters"]["type"] == "object"
+ assert "location" in function["parameters"]["properties"]
+
+ def test_tool_without_description(self):
+ """Test tool conversion without description."""
+ tool = ToolDefinition(tool_name="test_tool", input_schema={"type": "object", "properties": {}})
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ assert result["function"]["name"] == "test_tool"
+ assert "description" not in result["function"]
+ assert "parameters" in result["function"]
+
+ def test_builtin_tool_conversion(self):
+ """Test conversion of BuiltinTool enum."""
+ tool = ToolDefinition(
+ tool_name=BuiltinTool.code_interpreter,
+ description="Run Python code",
+ input_schema={"type": "object", "properties": {"code": {"type": "string"}}},
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ # BuiltinTool should be converted to its value
+ assert result["function"]["name"] == "code_interpreter"
+
+
+class TestComplexSchemaConversion:
+ """Test conversion of complex JSON Schema features."""
+
+ def test_schema_with_refs_and_defs(self):
+ """Test that $ref and $defs are passed through to OpenAI."""
+ tool = ToolDefinition(
+ tool_name="book_flight",
+ description="Book a flight",
+ input_schema={
+ "type": "object",
+ "properties": {
+ "flight": {"$ref": "#/$defs/FlightInfo"},
+ "passengers": {"type": "array", "items": {"$ref": "#/$defs/Passenger"}},
+ "payment": {"$ref": "#/$defs/Payment"},
+ },
+ "required": ["flight", "passengers", "payment"],
+ "$defs": {
+ "FlightInfo": {
+ "type": "object",
+ "properties": {
+ "from": {"type": "string", "description": "Departure airport"},
+ "to": {"type": "string", "description": "Arrival airport"},
+ "date": {"type": "string", "format": "date"},
+ },
+ "required": ["from", "to", "date"],
+ },
+ "Passenger": {
+ "type": "object",
+ "properties": {"name": {"type": "string"}, "age": {"type": "integer", "minimum": 0}},
+ "required": ["name", "age"],
+ },
+ "Payment": {
+ "type": "object",
+ "properties": {
+ "method": {"type": "string", "enum": ["credit_card", "debit_card"]},
+ "amount": {"type": "number", "minimum": 0},
+ },
+ },
+ },
+ },
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ params = result["function"]["parameters"]
+
+ # Verify $defs are preserved
+ assert "$defs" in params
+ assert "FlightInfo" in params["$defs"]
+ assert "Passenger" in params["$defs"]
+ assert "Payment" in params["$defs"]
+
+ # Verify $ref are preserved
+ assert params["properties"]["flight"]["$ref"] == "#/$defs/FlightInfo"
+ assert params["properties"]["passengers"]["items"]["$ref"] == "#/$defs/Passenger"
+ assert params["properties"]["payment"]["$ref"] == "#/$defs/Payment"
+
+ # Verify nested schema details are preserved
+ assert params["$defs"]["FlightInfo"]["properties"]["date"]["format"] == "date"
+ assert params["$defs"]["Passenger"]["properties"]["age"]["minimum"] == 0
+ assert params["$defs"]["Payment"]["properties"]["method"]["enum"] == ["credit_card", "debit_card"]
+
+ def test_anyof_schema_conversion(self):
+ """Test conversion of anyOf schemas."""
+ tool = ToolDefinition(
+ tool_name="flexible_input",
+ input_schema={
+ "type": "object",
+ "properties": {
+ "contact": {
+ "anyOf": [
+ {"type": "string", "format": "email"},
+ {"type": "string", "pattern": "^\\+?[0-9]{10,15}$"},
+ ],
+ "description": "Email or phone number",
+ }
+ },
+ },
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ contact_schema = result["function"]["parameters"]["properties"]["contact"]
+ assert "anyOf" in contact_schema
+ assert len(contact_schema["anyOf"]) == 2
+ assert contact_schema["anyOf"][0]["format"] == "email"
+ assert "pattern" in contact_schema["anyOf"][1]
+
+ def test_nested_objects_conversion(self):
+ """Test conversion of deeply nested objects."""
+ tool = ToolDefinition(
+ tool_name="nested_data",
+ input_schema={
+ "type": "object",
+ "properties": {
+ "user": {
+ "type": "object",
+ "properties": {
+ "profile": {
+ "type": "object",
+ "properties": {
+ "name": {"type": "string"},
+ "settings": {
+ "type": "object",
+ "properties": {"theme": {"type": "string", "enum": ["light", "dark"]}},
+ },
+ },
+ }
+ },
+ }
+ },
+ },
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ # Navigate deep structure
+ user_schema = result["function"]["parameters"]["properties"]["user"]
+ profile_schema = user_schema["properties"]["profile"]
+ settings_schema = profile_schema["properties"]["settings"]
+ theme_schema = settings_schema["properties"]["theme"]
+
+ assert theme_schema["enum"] == ["light", "dark"]
+
+ def test_array_schemas_with_constraints(self):
+ """Test conversion of array schemas with constraints."""
+ tool = ToolDefinition(
+ tool_name="list_processor",
+ input_schema={
+ "type": "object",
+ "properties": {
+ "items": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {"id": {"type": "integer"}, "name": {"type": "string"}},
+ "required": ["id"],
+ },
+ "minItems": 1,
+ "maxItems": 100,
+ "uniqueItems": True,
+ }
+ },
+ },
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ items_schema = result["function"]["parameters"]["properties"]["items"]
+ assert items_schema["type"] == "array"
+ assert items_schema["minItems"] == 1
+ assert items_schema["maxItems"] == 100
+ assert items_schema["uniqueItems"] is True
+ assert items_schema["items"]["type"] == "object"
+
+
+class TestOutputSchemaHandling:
+ """Test that output_schema is correctly handled (or dropped) for OpenAI."""
+
+ def test_output_schema_is_dropped(self):
+ """Test that output_schema is NOT included in OpenAI format (API limitation)."""
+ tool = ToolDefinition(
+ tool_name="calculator",
+ description="Perform calculation",
+ input_schema={"type": "object", "properties": {"x": {"type": "number"}, "y": {"type": "number"}}},
+ output_schema={"type": "object", "properties": {"result": {"type": "number"}}, "required": ["result"]},
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ # OpenAI doesn't support output schema
+ assert "outputSchema" not in result["function"]
+ assert "responseSchema" not in result["function"]
+ assert "output_schema" not in result["function"]
+
+ # But input schema should be present
+ assert "parameters" in result["function"]
+ assert result["function"]["parameters"] == tool.input_schema
+
+ def test_only_output_schema_no_input(self):
+ """Test tool with only output_schema (unusual but valid)."""
+ tool = ToolDefinition(
+ tool_name="no_input_tool",
+ description="Tool with no inputs",
+ output_schema={"type": "object", "properties": {"timestamp": {"type": "string"}}},
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ # No parameters should be set if input_schema is None
+ # (or we might set an empty object schema - implementation detail)
+ assert "outputSchema" not in result["function"]
+
+
+class TestEdgeCases:
+ """Test edge cases and error conditions."""
+
+ def test_tool_with_no_schemas(self):
+ """Test tool with neither input nor output schema."""
+ tool = ToolDefinition(tool_name="schemaless_tool", description="Tool without schemas")
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ assert result["function"]["name"] == "schemaless_tool"
+ assert result["function"]["description"] == "Tool without schemas"
+ # Implementation detail: might have no parameters or empty object
+
+ def test_empty_input_schema(self):
+ """Test tool with empty object schema."""
+ tool = ToolDefinition(tool_name="no_params", input_schema={"type": "object", "properties": {}})
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ assert result["function"]["parameters"]["type"] == "object"
+ assert result["function"]["parameters"]["properties"] == {}
+
+ def test_schema_with_additional_properties(self):
+ """Test that additionalProperties is preserved."""
+ tool = ToolDefinition(
+ tool_name="flexible_tool",
+ input_schema={
+ "type": "object",
+ "properties": {"known_field": {"type": "string"}},
+ "additionalProperties": True,
+ },
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ assert result["function"]["parameters"]["additionalProperties"] is True
+
+ def test_schema_with_pattern_properties(self):
+ """Test that patternProperties is preserved."""
+ tool = ToolDefinition(
+ tool_name="pattern_tool",
+ input_schema={"type": "object", "patternProperties": {"^[a-z]+$": {"type": "string"}}},
+ )
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ assert "patternProperties" in result["function"]["parameters"]
+
+ def test_schema_identity(self):
+ """Test that converted schema is identical to input (no lossy conversion)."""
+ original_schema = {
+ "type": "object",
+ "properties": {"complex": {"$ref": "#/$defs/Complex"}},
+ "$defs": {
+ "Complex": {
+ "type": "object",
+ "properties": {"nested": {"anyOf": [{"type": "string"}, {"type": "number"}]}},
+ }
+ },
+ "required": ["complex"],
+ "additionalProperties": False,
+ }
+
+ tool = ToolDefinition(tool_name="test", input_schema=original_schema)
+
+ result = convert_tooldef_to_openai_tool(tool)
+
+ # Converted parameters should be EXACTLY the same as input
+ assert result["function"]["parameters"] == original_schema
+
+
+class TestConversionConsistency:
+ """Test consistency across multiple conversions."""
+
+ def test_multiple_tools_with_shared_defs(self):
+ """Test converting multiple tools that could share definitions."""
+ tool1 = ToolDefinition(
+ tool_name="tool1",
+ input_schema={
+ "type": "object",
+ "properties": {"data": {"$ref": "#/$defs/Data"}},
+ "$defs": {"Data": {"type": "object", "properties": {"x": {"type": "number"}}}},
+ },
+ )
+
+ tool2 = ToolDefinition(
+ tool_name="tool2",
+ input_schema={
+ "type": "object",
+ "properties": {"info": {"$ref": "#/$defs/Data"}},
+ "$defs": {"Data": {"type": "object", "properties": {"y": {"type": "string"}}}},
+ },
+ )
+
+ result1 = convert_tooldef_to_openai_tool(tool1)
+ result2 = convert_tooldef_to_openai_tool(tool2)
+
+ # Each tool maintains its own $defs independently
+ assert result1["function"]["parameters"]["$defs"]["Data"]["properties"]["x"]["type"] == "number"
+ assert result2["function"]["parameters"]["$defs"]["Data"]["properties"]["y"]["type"] == "string"
+
+ def test_conversion_is_pure(self):
+ """Test that conversion doesn't modify the original tool."""
+ original_schema = {
+ "type": "object",
+ "properties": {"x": {"type": "string"}},
+ "$defs": {"T": {"type": "number"}},
+ }
+
+ tool = ToolDefinition(tool_name="test", input_schema=original_schema.copy())
+
+ # Convert
+ convert_tooldef_to_openai_tool(tool)
+
+ # Original tool should be unchanged
+ assert tool.input_schema == original_schema
+ assert "$defs" in tool.input_schema
diff --git a/tests/unit/tools/test_tools_json_schema.py b/tests/unit/tools/test_tools_json_schema.py
new file mode 100644
index 000000000..8fe3103bc
--- /dev/null
+++ b/tests/unit/tools/test_tools_json_schema.py
@@ -0,0 +1,297 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""
+Unit tests for JSON Schema-based tool definitions.
+Tests the new input_schema and output_schema fields.
+"""
+
+from pydantic import ValidationError
+
+from llama_stack.apis.tools import ToolDef
+from llama_stack.models.llama.datatypes import BuiltinTool, ToolDefinition
+
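+# ToolDef (API surface) and ToolDefinition (internal) both carry raw JSON Schema dicts
+# in input_schema / output_schema, replacing the removed ToolParameter lists.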
+
+class TestToolDefValidation:
+ """Test ToolDef validation with JSON Schema."""
+
+ def test_simple_input_schema(self):
+ """Test ToolDef with simple input schema."""
+ tool = ToolDef(
+ name="get_weather",
+ description="Get weather information",
+ input_schema={
+ "type": "object",
+ "properties": {"location": {"type": "string", "description": "City name"}},
+ "required": ["location"],
+ },
+ )
+
+ assert tool.name == "get_weather"
+ assert tool.input_schema["type"] == "object"
+ assert "location" in tool.input_schema["properties"]
+ assert tool.output_schema is None
+
+ def test_input_and_output_schema(self):
+ """Test ToolDef with both input and output schemas."""
+ tool = ToolDef(
+ name="calculate",
+ description="Perform calculation",
+ input_schema={
+ "type": "object",
+ "properties": {"x": {"type": "number"}, "y": {"type": "number"}},
+ "required": ["x", "y"],
+ },
+ output_schema={"type": "object", "properties": {"result": {"type": "number"}}, "required": ["result"]},
+ )
+
+ assert tool.input_schema is not None
+ assert tool.output_schema is not None
+ assert "result" in tool.output_schema["properties"]
+
+ def test_schema_with_refs_and_defs(self):
+ """Test that $ref and $defs are preserved in schemas."""
+ tool = ToolDef(
+ name="book_flight",
+ description="Book a flight",
+ input_schema={
+ "type": "object",
+ "properties": {
+ "flight": {"$ref": "#/$defs/FlightInfo"},
+ "passengers": {"type": "array", "items": {"$ref": "#/$defs/Passenger"}},
+ },
+ "$defs": {
+ "FlightInfo": {
+ "type": "object",
+ "properties": {"from": {"type": "string"}, "to": {"type": "string"}},
+ },
+ "Passenger": {
+ "type": "object",
+ "properties": {"name": {"type": "string"}, "age": {"type": "integer"}},
+ },
+ },
+ },
+ )
+
+ # Verify $defs are preserved
+ assert "$defs" in tool.input_schema
+ assert "FlightInfo" in tool.input_schema["$defs"]
+ assert "Passenger" in tool.input_schema["$defs"]
+
+ # Verify $ref are preserved
+ assert tool.input_schema["properties"]["flight"]["$ref"] == "#/$defs/FlightInfo"
+ assert tool.input_schema["properties"]["passengers"]["items"]["$ref"] == "#/$defs/Passenger"
+
+ def test_output_schema_with_refs(self):
+ """Test that output_schema also supports $ref and $defs."""
+ tool = ToolDef(
+ name="search",
+ description="Search for items",
+ input_schema={"type": "object", "properties": {"query": {"type": "string"}}},
+ output_schema={
+ "type": "object",
+ "properties": {"results": {"type": "array", "items": {"$ref": "#/$defs/SearchResult"}}},
+ "$defs": {
+ "SearchResult": {
+ "type": "object",
+ "properties": {"title": {"type": "string"}, "score": {"type": "number"}},
+ }
+ },
+ },
+ )
+
+ assert "$defs" in tool.output_schema
+ assert "SearchResult" in tool.output_schema["$defs"]
+
+ def test_complex_json_schema_features(self):
+ """Test various JSON Schema features are preserved."""
+ tool = ToolDef(
+ name="complex_tool",
+ description="Tool with complex schema",
+ input_schema={
+ "type": "object",
+ "properties": {
+ # anyOf
+ "contact": {
+ "anyOf": [
+ {"type": "string", "format": "email"},
+ {"type": "string", "pattern": "^\\+?[0-9]{10,15}$"},
+ ]
+ },
+ # enum
+ "status": {"type": "string", "enum": ["pending", "approved", "rejected"]},
+ # nested objects
+ "address": {
+ "type": "object",
+ "properties": {
+ "street": {"type": "string"},
+ "city": {"type": "string"},
+ "zipcode": {"type": "string", "pattern": "^[0-9]{5}$"},
+ },
+ "required": ["street", "city"],
+ },
+ # array with constraints
+ "tags": {
+ "type": "array",
+ "items": {"type": "string"},
+ "minItems": 1,
+ "maxItems": 10,
+ "uniqueItems": True,
+ },
+ },
+ },
+ )
+
+ # Verify anyOf
+ assert "anyOf" in tool.input_schema["properties"]["contact"]
+
+ # Verify enum
+ assert tool.input_schema["properties"]["status"]["enum"] == ["pending", "approved", "rejected"]
+
+ # Verify nested object
+ assert tool.input_schema["properties"]["address"]["type"] == "object"
+ assert "zipcode" in tool.input_schema["properties"]["address"]["properties"]
+
+ # Verify array constraints
+ tags_schema = tool.input_schema["properties"]["tags"]
+ assert tags_schema["minItems"] == 1
+ assert tags_schema["maxItems"] == 10
+ assert tags_schema["uniqueItems"] is True
+
+ def test_invalid_json_schema_raises_error(self):
+ """Test that invalid JSON Schema raises validation error."""
+ # TODO: tighten this test once JSON Schema validation is added to ToolDef.
+ # For now, Pydantic accepts any dict, so the construction below is a placeholder.
+
+ # This should eventually raise a validation error for the invalid schema
+ try:
+ ToolDef(
+ name="bad_tool",
+ input_schema={
+ "type": "invalid_type", # Not a valid JSON Schema type
+ "properties": "not_an_object", # Should be an object
+ },
+ )
+ # For now this passes, but shouldn't after we add validation
+ except ValidationError:
+ pass # Expected once validation is added
+
+
+class TestToolDefinitionValidation:
+ """Test ToolDefinition (internal) validation with JSON Schema."""
+
+ def test_simple_tool_definition(self):
+ """Test ToolDefinition with simple schema."""
+ tool = ToolDefinition(
+ tool_name="get_time",
+ description="Get current time",
+ input_schema={"type": "object", "properties": {"timezone": {"type": "string"}}},
+ )
+
+ assert tool.tool_name == "get_time"
+ assert tool.input_schema is not None
+
+ def test_builtin_tool_with_schema(self):
+ """Test ToolDefinition with BuiltinTool enum."""
+ tool = ToolDefinition(
+ tool_name=BuiltinTool.code_interpreter,
+ description="Run Python code",
+ input_schema={"type": "object", "properties": {"code": {"type": "string"}}, "required": ["code"]},
+ output_schema={"type": "object", "properties": {"output": {"type": "string"}, "error": {"type": "string"}}},
+ )
+
+ assert isinstance(tool.tool_name, BuiltinTool)
+ assert tool.input_schema is not None
+ assert tool.output_schema is not None
+
+ def test_tool_definition_with_refs(self):
+ """Test ToolDefinition preserves $ref/$defs."""
+ tool = ToolDefinition(
+ tool_name="process_data",
+ input_schema={
+ "type": "object",
+ "properties": {"data": {"$ref": "#/$defs/DataObject"}},
+ "$defs": {
+ "DataObject": {
+ "type": "object",
+ "properties": {
+ "id": {"type": "integer"},
+ "values": {"type": "array", "items": {"type": "number"}},
+ },
+ }
+ },
+ },
+ )
+
+ assert "$defs" in tool.input_schema
+ assert tool.input_schema["properties"]["data"]["$ref"] == "#/$defs/DataObject"
+
+
+class TestSchemaEquivalence:
+ """Test that schemas remain unchanged through serialization."""
+
+ def test_schema_roundtrip(self):
+ """Test that schemas survive model_dump/model_validate roundtrip."""
+ original = ToolDef(
+ name="test",
+ input_schema={
+ "type": "object",
+ "properties": {"x": {"$ref": "#/$defs/X"}},
+ "$defs": {"X": {"type": "string"}},
+ },
+ )
+
+ # Serialize and deserialize
+ dumped = original.model_dump()
+ restored = ToolDef(**dumped)
+
+ # Schemas should be identical
+ assert restored.input_schema == original.input_schema
+ assert "$defs" in restored.input_schema
+ assert restored.input_schema["properties"]["x"]["$ref"] == "#/$defs/X"
+
+ def test_json_serialization(self):
+ """Test JSON serialization preserves schema."""
+ import json
+
+ tool = ToolDef(
+ name="test",
+ input_schema={
+ "type": "object",
+ "properties": {"a": {"type": "string"}},
+ "$defs": {"T": {"type": "number"}},
+ },
+ output_schema={"type": "object", "properties": {"b": {"$ref": "#/$defs/T"}}},
+ )
+
+ # Serialize to JSON and back
+ json_str = tool.model_dump_json()
+ parsed = json.loads(json_str)
+ restored = ToolDef(**parsed)
+
+ assert restored.input_schema == tool.input_schema
+ assert restored.output_schema == tool.output_schema
+ assert "$defs" in restored.input_schema
+
+
+class TestBackwardsCompatibility:
+ """Test handling of legacy code patterns."""
+
+ def test_none_schemas(self):
+ """Test tools with no schemas (legacy case)."""
+ tool = ToolDef(name="legacy_tool", description="Tool without schemas", input_schema=None, output_schema=None)
+
+ assert tool.input_schema is None
+ assert tool.output_schema is None
+
+ def test_metadata_preserved(self):
+ """Test that metadata field still works."""
+ tool = ToolDef(
+ name="test", input_schema={"type": "object"}, metadata={"endpoint": "http://example.com", "version": "1.0"}
+ )
+
+ assert tool.metadata["endpoint"] == "http://example.com"
+ assert tool.metadata["version"] == "1.0"