From d32d8ec94b2fc664e72d508c7e0de1966bba623e Mon Sep 17 00:00:00 2001 From: Raghotham Murthy Date: Thu, 2 Oct 2025 11:17:41 -0700 Subject: [PATCH 01/13] feat: Add support for memory-only kvstore --- llama_stack/providers/utils/kvstore/config.py | 2 +- .../providers/utils/kvstore/sqlite/sqlite.py | 106 ++++++++------- .../unit/utils/kvstore/test_sqlite_memory.py | 121 ++++++++++++++++++ 3 files changed, 181 insertions(+), 48 deletions(-) create mode 100644 tests/unit/utils/kvstore/test_sqlite_memory.py diff --git a/llama_stack/providers/utils/kvstore/config.py b/llama_stack/providers/utils/kvstore/config.py index 7b6a79350..4899bbc80 100644 --- a/llama_stack/providers/utils/kvstore/config.py +++ b/llama_stack/providers/utils/kvstore/config.py @@ -53,7 +53,7 @@ class SqliteKVStoreConfig(CommonConfig): type: Literal["sqlite"] = KVStoreType.sqlite.value db_path: str = Field( default=(RUNTIME_BASE_DIR / "kvstore.db").as_posix(), - description="File path for the sqlite database", + description="File path for the sqlite database. 
Use ':memory:' for an in-memory database", ) @classmethod diff --git a/llama_stack/providers/utils/kvstore/sqlite/sqlite.py b/llama_stack/providers/utils/kvstore/sqlite/sqlite.py index 5b782902e..5372d2981 100644 --- a/llama_stack/providers/utils/kvstore/sqlite/sqlite.py +++ b/llama_stack/providers/utils/kvstore/sqlite/sqlite.py @@ -21,67 +21,79 @@ class SqliteKVStoreImpl(KVStore): def __init__(self, config: SqliteKVStoreConfig): self.db_path = config.db_path self.table_name = "kvstore" + self._conn = None def __str__(self): return f"SqliteKVStoreImpl(db_path={self.db_path}, table_name={self.table_name})" + def _is_memory_db(self) -> bool: + """Check if this is an in-memory database.""" + return self.db_path == ":memory:" or "mode=memory" in self.db_path + async def initialize(self): - os.makedirs(os.path.dirname(self.db_path), exist_ok=True) - async with aiosqlite.connect(self.db_path) as db: - await db.execute( - f""" - CREATE TABLE IF NOT EXISTS {self.table_name} ( - key TEXT PRIMARY KEY, - value TEXT, - expiration TIMESTAMP - ) - """ + # Skip directory creation for in-memory databases and file: URIs + if not self._is_memory_db() and not self.db_path.startswith("file:"): + db_dir = os.path.dirname(self.db_path) + if db_dir: # Only create if there's a directory component + os.makedirs(db_dir, exist_ok=True) + + # Create persistent connection for all databases + self._conn = await aiosqlite.connect(self.db_path) + await self._conn.execute( + f""" + CREATE TABLE IF NOT EXISTS {self.table_name} ( + key TEXT PRIMARY KEY, + value TEXT, + expiration TIMESTAMP ) - await db.commit() + """ + ) + await self._conn.commit() + + async def close(self): + """Close the persistent connection.""" + if self._conn: + await self._conn.close() + self._conn = None async def set(self, key: str, value: str, expiration: datetime | None = None) -> None: - async with aiosqlite.connect(self.db_path) as db: - await db.execute( - f"INSERT OR REPLACE INTO {self.table_name} (key, value, 
expiration) VALUES (?, ?, ?)", - (key, value, expiration), - ) - await db.commit() + await self._conn.execute( + f"INSERT OR REPLACE INTO {self.table_name} (key, value, expiration) VALUES (?, ?, ?)", + (key, value, expiration), + ) + await self._conn.commit() async def get(self, key: str) -> str | None: - async with aiosqlite.connect(self.db_path) as db: - async with db.execute(f"SELECT value, expiration FROM {self.table_name} WHERE key = ?", (key,)) as cursor: - row = await cursor.fetchone() - if row is None: - return None - value, expiration = row - if not isinstance(value, str): - logger.warning(f"Expected string value for key {key}, got {type(value)}, returning None") - return None - return value + async with self._conn.execute(f"SELECT value, expiration FROM {self.table_name} WHERE key = ?", (key,)) as cursor: + row = await cursor.fetchone() + if row is None: + return None + value, expiration = row + if not isinstance(value, str): + logger.warning(f"Expected string value for key {key}, got {type(value)}, returning None") + return None + return value async def delete(self, key: str) -> None: - async with aiosqlite.connect(self.db_path) as db: - await db.execute(f"DELETE FROM {self.table_name} WHERE key = ?", (key,)) - await db.commit() + await self._conn.execute(f"DELETE FROM {self.table_name} WHERE key = ?", (key,)) + await self._conn.commit() async def values_in_range(self, start_key: str, end_key: str) -> list[str]: - async with aiosqlite.connect(self.db_path) as db: - async with db.execute( - f"SELECT key, value, expiration FROM {self.table_name} WHERE key >= ? AND key <= ?", - (start_key, end_key), - ) as cursor: - result = [] - async for row in cursor: - _, value, _ = row - result.append(value) - return result + async with self._conn.execute( + f"SELECT key, value, expiration FROM {self.table_name} WHERE key >= ? 
AND key <= ?", + (start_key, end_key), + ) as cursor: + result = [] + async for row in cursor: + _, value, _ = row + result.append(value) + return result async def keys_in_range(self, start_key: str, end_key: str) -> list[str]: """Get all keys in the given range.""" - async with aiosqlite.connect(self.db_path) as db: - cursor = await db.execute( - f"SELECT key FROM {self.table_name} WHERE key >= ? AND key <= ?", - (start_key, end_key), - ) - rows = await cursor.fetchall() - return [row[0] for row in rows] + cursor = await self._conn.execute( + f"SELECT key FROM {self.table_name} WHERE key >= ? AND key <= ?", + (start_key, end_key), + ) + rows = await cursor.fetchall() + return [row[0] for row in rows] diff --git a/tests/unit/utils/kvstore/test_sqlite_memory.py b/tests/unit/utils/kvstore/test_sqlite_memory.py new file mode 100644 index 000000000..8017f9bdd --- /dev/null +++ b/tests/unit/utils/kvstore/test_sqlite_memory.py @@ -0,0 +1,121 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
+ +import pytest + +from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig +from llama_stack.providers.utils.kvstore.sqlite import SqliteKVStoreImpl + + +@pytest.mark.asyncio +async def test_memory_kvstore_basic_operations(): + """Test basic CRUD operations with :memory: database.""" + config = SqliteKVStoreConfig(db_path=":memory:") + kvstore = SqliteKVStoreImpl(config) + await kvstore.initialize() + + try: + # Test set and get + await kvstore.set("test_key", "test_value") + value = await kvstore.get("test_key") + assert value == "test_value" + + # Test get non-existent key + value = await kvstore.get("non_existent") + assert value is None + + # Test delete + await kvstore.delete("test_key") + value = await kvstore.get("test_key") + assert value is None + finally: + await kvstore.close() + + +@pytest.mark.asyncio +async def test_memory_kvstore_range_operations(): + """Test range operations with :memory: database.""" + config = SqliteKVStoreConfig(db_path=":memory:") + kvstore = SqliteKVStoreImpl(config) + await kvstore.initialize() + + try: + # Set multiple keys + await kvstore.set("key_1", "value_1") + await kvstore.set("key_2", "value_2") + await kvstore.set("key_3", "value_3") + await kvstore.set("key_4", "value_4") + + # Test values_in_range + values = await kvstore.values_in_range("key_1", "key_3") + assert len(values) == 3 + assert "value_1" in values + assert "value_2" in values + assert "value_3" in values + + # Test keys_in_range + keys = await kvstore.keys_in_range("key_2", "key_4") + assert len(keys) == 3 + assert "key_2" in keys + assert "key_3" in keys + assert "key_4" in keys + finally: + await kvstore.close() + + +@pytest.mark.asyncio +async def test_memory_kvstore_multiple_instances(): + """Test that multiple :memory: instances are independent.""" + config1 = SqliteKVStoreConfig(db_path=":memory:") + kvstore1 = SqliteKVStoreImpl(config1) + await kvstore1.initialize() + + config2 = SqliteKVStoreConfig(db_path=":memory:") + 
kvstore2 = SqliteKVStoreImpl(config2) + await kvstore2.initialize() + + try: + # Set value in first instance + await kvstore1.set("shared_key", "value_1") + + # Verify second instance doesn't have the value + value = await kvstore2.get("shared_key") + assert value is None + + # Set different value in second instance + await kvstore2.set("shared_key", "value_2") + + # Verify instances remain independent + value1 = await kvstore1.get("shared_key") + value2 = await kvstore2.get("shared_key") + assert value1 == "value_1" + assert value2 == "value_2" + finally: + await kvstore1.close() + await kvstore2.close() + + +@pytest.mark.asyncio +async def test_memory_kvstore_persistence_behavior(): + """Test that :memory: database doesn't persist across instances.""" + config = SqliteKVStoreConfig(db_path=":memory:") + + # First instance + kvstore1 = SqliteKVStoreImpl(config) + await kvstore1.initialize() + await kvstore1.set("test_key", "test_value") + await kvstore1.close() + + # Create new instance with same config + kvstore2 = SqliteKVStoreImpl(config) + await kvstore2.initialize() + + try: + # Data should not persist + value = await kvstore2.get("test_key") + assert value is None + finally: + await kvstore2.close() From 82ca72bde629a8dee452a8cdf27698feab94d3bf Mon Sep 17 00:00:00 2001 From: Raghotham Murthy Date: Thu, 2 Oct 2025 11:34:10 -0700 Subject: [PATCH 02/13] fix precommit errors --- llama_stack/providers/utils/kvstore/sqlite/sqlite.py | 11 +++++++++-- tests/unit/utils/kvstore/test_sqlite_memory.py | 5 ----- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/llama_stack/providers/utils/kvstore/sqlite/sqlite.py b/llama_stack/providers/utils/kvstore/sqlite/sqlite.py index 5372d2981..e1c0332fe 100644 --- a/llama_stack/providers/utils/kvstore/sqlite/sqlite.py +++ b/llama_stack/providers/utils/kvstore/sqlite/sqlite.py @@ -21,7 +21,7 @@ class SqliteKVStoreImpl(KVStore): def __init__(self, config: SqliteKVStoreConfig): self.db_path = config.db_path 
self.table_name = "kvstore" - self._conn = None + self._conn: aiosqlite.Connection | None = None def __str__(self): return f"SqliteKVStoreImpl(db_path={self.db_path}, table_name={self.table_name})" @@ -57,6 +57,7 @@ class SqliteKVStoreImpl(KVStore): self._conn = None async def set(self, key: str, value: str, expiration: datetime | None = None) -> None: + assert self._conn is not None, "Connection not initialized. Call initialize() first." await self._conn.execute( f"INSERT OR REPLACE INTO {self.table_name} (key, value, expiration) VALUES (?, ?, ?)", (key, value, expiration), @@ -64,7 +65,10 @@ class SqliteKVStoreImpl(KVStore): await self._conn.commit() async def get(self, key: str) -> str | None: - async with self._conn.execute(f"SELECT value, expiration FROM {self.table_name} WHERE key = ?", (key,)) as cursor: + assert self._conn is not None, "Connection not initialized. Call initialize() first." + async with self._conn.execute( + f"SELECT value, expiration FROM {self.table_name} WHERE key = ?", (key,) + ) as cursor: row = await cursor.fetchone() if row is None: return None @@ -75,10 +79,12 @@ class SqliteKVStoreImpl(KVStore): return value async def delete(self, key: str) -> None: + assert self._conn is not None, "Connection not initialized. Call initialize() first." await self._conn.execute(f"DELETE FROM {self.table_name} WHERE key = ?", (key,)) await self._conn.commit() async def values_in_range(self, start_key: str, end_key: str) -> list[str]: + assert self._conn is not None, "Connection not initialized. Call initialize() first." async with self._conn.execute( f"SELECT key, value, expiration FROM {self.table_name} WHERE key >= ? AND key <= ?", (start_key, end_key), @@ -91,6 +97,7 @@ class SqliteKVStoreImpl(KVStore): async def keys_in_range(self, start_key: str, end_key: str) -> list[str]: """Get all keys in the given range.""" + assert self._conn is not None, "Connection not initialized. Call initialize() first." 
cursor = await self._conn.execute( f"SELECT key FROM {self.table_name} WHERE key >= ? AND key <= ?", (start_key, end_key), diff --git a/tests/unit/utils/kvstore/test_sqlite_memory.py b/tests/unit/utils/kvstore/test_sqlite_memory.py index 8017f9bdd..6d8267ce8 100644 --- a/tests/unit/utils/kvstore/test_sqlite_memory.py +++ b/tests/unit/utils/kvstore/test_sqlite_memory.py @@ -4,13 +4,11 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -import pytest from llama_stack.providers.utils.kvstore.config import SqliteKVStoreConfig from llama_stack.providers.utils.kvstore.sqlite import SqliteKVStoreImpl -@pytest.mark.asyncio async def test_memory_kvstore_basic_operations(): """Test basic CRUD operations with :memory: database.""" config = SqliteKVStoreConfig(db_path=":memory:") @@ -35,7 +33,6 @@ async def test_memory_kvstore_basic_operations(): await kvstore.close() -@pytest.mark.asyncio async def test_memory_kvstore_range_operations(): """Test range operations with :memory: database.""" config = SqliteKVStoreConfig(db_path=":memory:") @@ -66,7 +63,6 @@ async def test_memory_kvstore_range_operations(): await kvstore.close() -@pytest.mark.asyncio async def test_memory_kvstore_multiple_instances(): """Test that multiple :memory: instances are independent.""" config1 = SqliteKVStoreConfig(db_path=":memory:") @@ -98,7 +94,6 @@ async def test_memory_kvstore_multiple_instances(): await kvstore2.close() -@pytest.mark.asyncio async def test_memory_kvstore_persistence_behavior(): """Test that :memory: database doesn't persist across instances.""" config = SqliteKVStoreConfig(db_path=":memory:") From 67fa616c51b8427c9b3a0fb91e1f83fb9b787281 Mon Sep 17 00:00:00 2001 From: Charlie Doern Date: Thu, 2 Oct 2025 09:04:26 -0400 Subject: [PATCH 03/13] fix: re-enable conformance skipping ability (#3651) # What does this PR do? 
this was broken by #3631, re-enable this ability by only using oasdiff when .skip != 'true' Signed-off-by: Charlie Doern --- .github/workflows/conformance.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/conformance.yml b/.github/workflows/conformance.yml index 2dd62a9c4..5bbd53e5f 100644 --- a/.github/workflows/conformance.yml +++ b/.github/workflows/conformance.yml @@ -96,6 +96,7 @@ jobs: # Verify API specs exist for conformance testing - name: Check API Specs + if: steps.skip-check.outputs.skip != 'true' run: | echo "Checking for API specification files..." @@ -134,10 +135,10 @@ jobs: - name: Run OpenAPI Breaking Change Diff if: steps.skip-check.outputs.skip != 'true' run: | - oasdiff breaking --fail-on ERR base/docs/static/llama-stack-spec.yaml docs/static/llama-stack-spec.yaml --match-path '^/v1/' + oasdiff breaking --fail-on ERR $BASE_SPEC $CURRENT_SPEC --match-path '^/v1/' # Report when test is skipped - name: Report skip reason if: steps.skip-check.outputs.skip == 'true' run: | - oasdiff breaking --fail-on ERR $BASE_SPEC $CURRENT_SPEC --match-path '^/v1/' + echo "Conformance test skipped due to breaking change indicator" From 856de662313e38bc449ba7f76c94864440f2bf8e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Han?= Date: Thu, 2 Oct 2025 16:11:05 +0200 Subject: [PATCH 04/13] chore!: add double routes for v1/openai/v1 (#3636) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit So that users get a warning in 0.3.0 and we remove them in 0.4.0. 
Signed-off-by: Sébastien Han --- docs/static/deprecated-llama-stack-spec.html | 7163 +++++++++++++++++- docs/static/deprecated-llama-stack-spec.yaml | 5435 +++++++++++++ docs/static/llama-stack-spec.html | 42 +- docs/static/llama-stack-spec.yaml | 34 +- llama_stack/apis/agents/agents.py | 14 +- llama_stack/apis/batches/batches.py | 4 + llama_stack/apis/files/files.py | 5 + llama_stack/apis/inference/inference.py | 7 + llama_stack/apis/models/models.py | 8 + llama_stack/apis/safety/safety.py | 1 + llama_stack/apis/vector_io/vector_io.py | 77 + 11 files changed, 12768 insertions(+), 22 deletions(-) diff --git a/docs/static/deprecated-llama-stack-spec.html b/docs/static/deprecated-llama-stack-spec.html index 21ba4a1de..99ce8ee9c 100644 --- a/docs/static/deprecated-llama-stack-spec.html +++ b/docs/static/deprecated-llama-stack-spec.html @@ -1414,6 +1414,1841 @@ "deprecated": true } }, + "/v1/openai/v1/chat/completions": { + "get": { + "responses": { + "200": { + "description": "A ListOpenAIChatCompletionResponse.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOpenAIChatCompletionResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Inference" + ], + "summary": "List all chat completions.", + "description": "List all chat completions.", + "parameters": [ + { + "name": "after", + "in": "query", + "description": "The ID of the last chat completion to return.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "limit", + "in": "query", + "description": "The maximum number of chat completions to return.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "model", + "in": "query", + "description": "The model to 
filter by.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "order", + "in": "query", + "description": "The order to sort the chat completions by: \"asc\" or \"desc\". Defaults to \"desc\".", + "required": false, + "schema": { + "$ref": "#/components/schemas/Order" + } + } + ], + "deprecated": true + }, + "post": { + "responses": { + "200": { + "description": "An OpenAIChatCompletion.", + "content": { + "application/json": { + "schema": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIChatCompletion" + }, + { + "$ref": "#/components/schemas/OpenAIChatCompletionChunk" + } + ] + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Inference" + ], + "summary": "Generate an OpenAI-compatible chat completion for the given messages using the specified model.", + "description": "Generate an OpenAI-compatible chat completion for the given messages using the specified model.", + "parameters": [], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiChatCompletionRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/chat/completions/{completion_id}": { + "get": { + "responses": { + "200": { + "description": "A OpenAICompletionWithInputMessages.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAICompletionWithInputMessages" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Inference" + ], + 
"summary": "Describe a chat completion by its ID.", + "description": "Describe a chat completion by its ID.", + "parameters": [ + { + "name": "completion_id", + "in": "path", + "description": "ID of the chat completion.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/completions": { + "post": { + "responses": { + "200": { + "description": "An OpenAICompletion.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAICompletion" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Inference" + ], + "summary": "Generate an OpenAI-compatible completion for the given prompt using the specified model.", + "description": "Generate an OpenAI-compatible completion for the given prompt using the specified model.", + "parameters": [], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiCompletionRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/embeddings": { + "post": { + "responses": { + "200": { + "description": "An OpenAIEmbeddingsResponse containing the embeddings.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIEmbeddingsResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Inference" + ], + "summary": "Generate OpenAI-compatible embeddings for the given input using the specified model.", + "description": 
"Generate OpenAI-compatible embeddings for the given input using the specified model.", + "parameters": [], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiEmbeddingsRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/files": { + "get": { + "responses": { + "200": { + "description": "An ListOpenAIFileResponse containing the list of files.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOpenAIFileResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Files" + ], + "summary": "Returns a list of files that belong to the user's organization.", + "description": "Returns a list of files that belong to the user's organization.", + "parameters": [ + { + "name": "after", + "in": "query", + "description": "A cursor for use in pagination. `after` is an object ID that defines your place in the list. For instance, if you make a list request and receive 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the list.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "limit", + "in": "query", + "description": "A limit on the number of objects to be returned. Limit can range between 1 and 10,000, and the default is 10,000.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "order", + "in": "query", + "description": "Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending order and `desc` for descending order.", + "required": false, + "schema": { + "$ref": "#/components/schemas/Order" + } + }, + { + "name": "purpose", + "in": "query", + "description": "Only return files with the given purpose.", + "required": false, + "schema": { + "$ref": "#/components/schemas/OpenAIFilePurpose" + } + } + ], + "deprecated": true + }, + "post": { + "responses": { + "200": { + "description": "An OpenAIFileObject representing the uploaded file.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIFileObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Files" + ], + "summary": "Upload a file that can be used across various endpoints.", + "description": "Upload a file that can be used across various endpoints.\nThe file upload should be a multipart form request with:\n- file: The File object (not file name) to be uploaded.\n- purpose: The intended purpose of the uploaded file.\n- expires_after: Optional form values describing expiration for the file.", + "parameters": [], + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "type": "object", + "properties": { + "file": { + "type": "string", + "format": "binary" + }, + "purpose": { + "$ref": "#/components/schemas/OpenAIFilePurpose" + }, + "expires_after": { + "$ref": "#/components/schemas/ExpiresAfter" + } + }, + "required": [ + "file", + "purpose" + ] + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/files/{file_id}": { + "get": { + "responses": { + "200": { + "description": "An OpenAIFileObject containing file information.", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/OpenAIFileObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Files" + ], + "summary": "Returns information about a specific file.", + "description": "Returns information about a specific file.", + "parameters": [ + { + "name": "file_id", + "in": "path", + "description": "The ID of the file to use for this request.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + }, + "delete": { + "responses": { + "200": { + "description": "An OpenAIFileDeleteResponse indicating successful deletion.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIFileDeleteResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Files" + ], + "summary": "Delete a file.", + "description": "Delete a file.", + "parameters": [ + { + "name": "file_id", + "in": "path", + "description": "The ID of the file to use for this request.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/files/{file_id}/content": { + "get": { + "responses": { + "200": { + "description": "The raw file content as a binary response.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Response" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": 
"#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Files" + ], + "summary": "Returns the contents of the specified file.", + "description": "Returns the contents of the specified file.", + "parameters": [ + { + "name": "file_id", + "in": "path", + "description": "The ID of the file to use for this request.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/models": { + "get": { + "responses": { + "200": { + "description": "A OpenAIListModelsResponse.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIListModelsResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Models" + ], + "summary": "List models using the OpenAI API.", + "description": "List models using the OpenAI API.", + "parameters": [], + "deprecated": true + } + }, + "/v1/openai/v1/moderations": { + "post": { + "responses": { + "200": { + "description": "A moderation object.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ModerationObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Safety" + ], + "summary": "Classifies if text and/or image inputs are potentially harmful.", + "description": "Classifies if text and/or image inputs are potentially harmful.", + "parameters": [], + "requestBody": { + "content": { + "application/json": { + "schema": { 
+ "$ref": "#/components/schemas/RunModerationRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/responses": { + "get": { + "responses": { + "200": { + "description": "A ListOpenAIResponseObject.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOpenAIResponseObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Agents" + ], + "summary": "List all OpenAI responses.", + "description": "List all OpenAI responses.", + "parameters": [ + { + "name": "after", + "in": "query", + "description": "The ID of the last response to return.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "limit", + "in": "query", + "description": "The number of responses to return.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "model", + "in": "query", + "description": "The model to filter responses by.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "order", + "in": "query", + "description": "The order to sort responses by when sorted by created_at ('asc' or 'desc').", + "required": false, + "schema": { + "$ref": "#/components/schemas/Order" + } + } + ], + "deprecated": true + }, + "post": { + "responses": { + "200": { + "description": "A ListOpenAIResponseObject.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOpenAIResponseObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" 
+ } + }, + "tags": [ + "Agents" + ], + "summary": "List all OpenAI responses.", + "description": "List all OpenAI responses.", + "parameters": [], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOpenaiResponsesRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/responses/{response_id}": { + "get": { + "responses": { + "200": { + "description": "An OpenAIResponseObject.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIResponseObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Agents" + ], + "summary": "Retrieve an OpenAI response by its ID.", + "description": "Retrieve an OpenAI response by its ID.", + "parameters": [ + { + "name": "response_id", + "in": "path", + "description": "The ID of the OpenAI response to retrieve.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + }, + "delete": { + "responses": { + "200": { + "description": "An OpenAIDeleteResponseObject", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenAIDeleteResponseObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Agents" + ], + "summary": "Delete an OpenAI response by its ID.", + "description": "Delete an OpenAI response by its ID.", + "parameters": [ + { + "name": "response_id", + "in": "path", + "description": "The ID of the OpenAI response to 
delete.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/responses/{response_id}/input_items": { + "get": { + "responses": { + "200": { + "description": "An ListOpenAIResponseInputItem.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ListOpenAIResponseInputItem" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "Agents" + ], + "summary": "List input items for a given OpenAI response.", + "description": "List input items for a given OpenAI response.", + "parameters": [ + { + "name": "response_id", + "in": "path", + "description": "The ID of the response to retrieve input items for.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "after", + "in": "query", + "description": "An item ID to list items after, used for pagination.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "before", + "in": "query", + "description": "An item ID to list items before, used for pagination.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "include", + "in": "query", + "description": "Additional fields to include in the response.", + "required": false, + "schema": { + "type": "array", + "items": { + "type": "string" + } + } + }, + { + "name": "limit", + "in": "query", + "description": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "order", + "in": "query", + "description": "The order to return the input items in. 
Default is desc.", + "required": false, + "schema": { + "$ref": "#/components/schemas/Order" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores": { + "get": { + "responses": { + "200": { + "description": "A VectorStoreListResponse containing the list of vector stores.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreListResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Returns a list of vector stores.", + "description": "Returns a list of vector stores.", + "parameters": [ + { + "name": "limit", + "in": "query", + "description": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "order", + "in": "query", + "description": "Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "after", + "in": "query", + "description": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "before", + "in": "query", + "description": "A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.", + "required": false, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + }, + "post": { + "responses": { + "200": { + "description": "A VectorStoreObject representing the created vector store.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Creates a vector store.", + "description": "Creates a vector store.", + "parameters": [], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiCreateVectorStoreRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}": { + "get": { + "responses": { + "200": { + "description": "A VectorStoreObject representing the vector store.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Retrieves a vector store.", + "description": "Retrieves a vector store.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store to retrieve.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + }, + "post": { + "responses": { + "200": { + "description": "A VectorStoreObject 
representing the updated vector store.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Updates a vector store.", + "description": "Updates a vector store.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store to update.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiUpdateVectorStoreRequest" + } + } + }, + "required": true + }, + "deprecated": true + }, + "delete": { + "responses": { + "200": { + "description": "A VectorStoreDeleteResponse indicating the deletion status.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreDeleteResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Delete a vector store.", + "description": "Delete a vector store.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store to delete.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/file_batches": { + "post": { + "responses": { + "200": { + "description": "A VectorStoreFileBatchObject representing the created file 
batch.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFileBatchObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Create a vector store file batch.", + "description": "Create a vector store file batch.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store to create the file batch for.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiCreateVectorStoreFileBatchRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}": { + "get": { + "responses": { + "200": { + "description": "A VectorStoreFileBatchObject representing the file batch.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFileBatchObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Retrieve a vector store file batch.", + "description": "Retrieve a vector store file batch.", + "parameters": [ + { + "name": "batch_id", + "in": "path", + "description": "The ID of the file batch to retrieve.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store 
containing the file batch.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel": { + "post": { + "responses": { + "200": { + "description": "A VectorStoreFileBatchObject representing the cancelled file batch.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFileBatchObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Cancels a vector store file batch.", + "description": "Cancels a vector store file batch.", + "parameters": [ + { + "name": "batch_id", + "in": "path", + "description": "The ID of the file batch to cancel.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store containing the file batch.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/files": { + "get": { + "responses": { + "200": { + "description": "A VectorStoreFilesListInBatchResponse containing the list of files in the batch.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFilesListInBatchResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Returns a list of vector store 
files in a batch.", + "description": "Returns a list of vector store files in a batch.", + "parameters": [ + { + "name": "batch_id", + "in": "path", + "description": "The ID of the file batch to list files from.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store containing the file batch.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "after", + "in": "query", + "description": "A cursor for use in pagination. `after` is an object ID that defines your place in the list.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "before", + "in": "query", + "description": "A cursor for use in pagination. `before` is an object ID that defines your place in the list.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "filter", + "in": "query", + "description": "Filter by file status. One of in_progress, completed, failed, cancelled.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "limit", + "in": "query", + "description": "A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "order", + "in": "query", + "description": "Sort order by the `created_at` timestamp of the objects. 
`asc` for ascending order and `desc` for descending order.", + "required": false, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/files": { + "get": { + "responses": { + "200": { + "description": "A VectorStoreListFilesResponse containing the list of files.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreListFilesResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "List files in a vector store.", + "description": "List files in a vector store.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store to list files from.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "limit", + "in": "query", + "description": "(Optional) A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20.", + "required": false, + "schema": { + "type": "integer" + } + }, + { + "name": "order", + "in": "query", + "description": "(Optional) Sort order by the `created_at` timestamp of the objects. `asc` for ascending order and `desc` for descending order.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "after", + "in": "query", + "description": "(Optional) A cursor for use in pagination. `after` is an object ID that defines your place in the list.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "before", + "in": "query", + "description": "(Optional) A cursor for use in pagination. 
`before` is an object ID that defines your place in the list.", + "required": false, + "schema": { + "type": "string" + } + }, + { + "name": "filter", + "in": "query", + "description": "(Optional) Filter by file status to only return files with the specified status.", + "required": false, + "schema": { + "$ref": "#/components/schemas/VectorStoreFileStatus" + } + } + ], + "deprecated": true + }, + "post": { + "responses": { + "200": { + "description": "A VectorStoreFileObject representing the attached file.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFileObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Attach a file to a vector store.", + "description": "Attach a file to a vector store.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store to attach the file to.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiAttachFileToVectorStoreRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}": { + "get": { + "responses": { + "200": { + "description": "A VectorStoreFileObject representing the file.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFileObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": 
"#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Retrieves a vector store file.", + "description": "Retrieves a vector store file.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store containing the file to retrieve.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "file_id", + "in": "path", + "description": "The ID of the file to retrieve.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + }, + "post": { + "responses": { + "200": { + "description": "A VectorStoreFileObject representing the updated file.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFileObject" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Updates a vector store file.", + "description": "Updates a vector store file.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store containing the file to update.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "file_id", + "in": "path", + "description": "The ID of the file to update.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiUpdateVectorStoreFileRequest" + } + } + }, + "required": true + }, + "deprecated": true + }, + "delete": { + "responses": { + "200": { + "description": "A VectorStoreFileDeleteResponse indicating the deletion status.", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/VectorStoreFileDeleteResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Delete a vector store file.", + "description": "Delete a vector store file.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store containing the file to delete.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "file_id", + "in": "path", + "description": "The ID of the file to delete.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}/content": { + "get": { + "responses": { + "200": { + "description": "A list of InterleavedContent representing the file contents.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreFileContentsResponse" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Retrieves the contents of a vector store file.", + "description": "Retrieves the contents of a vector store file.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store containing the file to retrieve.", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "file_id", + "in": "path", + "description": "The ID of the file to retrieve.", + "required": true, + "schema": { + "type": "string" + } 
+ } + ], + "deprecated": true + } + }, + "/v1/openai/v1/vector_stores/{vector_store_id}/search": { + "post": { + "responses": { + "200": { + "description": "A VectorStoreSearchResponse containing the search results.", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VectorStoreSearchResponsePage" + } + } + } + }, + "400": { + "$ref": "#/components/responses/BadRequest400" + }, + "429": { + "$ref": "#/components/responses/TooManyRequests429" + }, + "500": { + "$ref": "#/components/responses/InternalServerError500" + }, + "default": { + "$ref": "#/components/responses/DefaultError" + } + }, + "tags": [ + "VectorIO" + ], + "summary": "Search for chunks in a vector store.", + "description": "Search for chunks in a vector store.\nSearches a vector store for relevant chunks based on a query and optional file attribute filters.", + "parameters": [ + { + "name": "vector_store_id", + "in": "path", + "description": "The ID of the vector store to search.", + "required": true, + "schema": { + "type": "string" + } + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/OpenaiSearchVectorStoreRequest" + } + } + }, + "required": true + }, + "deprecated": true + } + }, "/v1/post-training/job/artifacts": { "get": { "responses": { @@ -4965,6 +6800,5306 @@ "title": "Job", "description": "A job execution instance with status tracking." }, + "Order": { + "type": "string", + "enum": [ + "asc", + "desc" + ], + "title": "Order", + "description": "Sort order for paginated responses." 
+ }, + "ListOpenAIChatCompletionResponse": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The ID of the chat completion" + }, + "choices": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChoice" + }, + "description": "List of choices" + }, + "object": { + "type": "string", + "const": "chat.completion", + "default": "chat.completion", + "description": "The object type, which will be \"chat.completion\"" + }, + "created": { + "type": "integer", + "description": "The Unix timestamp in seconds when the chat completion was created" + }, + "model": { + "type": "string", + "description": "The model that was used to generate the chat completion" + }, + "input_messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIMessageParam" + } + } + }, + "additionalProperties": false, + "required": [ + "id", + "choices", + "object", + "created", + "model", + "input_messages" + ], + "title": "OpenAICompletionWithInputMessages" + }, + "description": "List of chat completion objects with their input messages" + }, + "has_more": { + "type": "boolean", + "description": "Whether there are more completions available beyond this list" + }, + "first_id": { + "type": "string", + "description": "ID of the first completion in this list" + }, + "last_id": { + "type": "string", + "description": "ID of the last completion in this list" + }, + "object": { + "type": "string", + "const": "list", + "default": "list", + "description": "Must be \"list\" to identify this as a list response" + } + }, + "additionalProperties": false, + "required": [ + "data", + "has_more", + "first_id", + "last_id", + "object" + ], + "title": "ListOpenAIChatCompletionResponse", + "description": "Response from listing OpenAI-compatible chat completions." 
+ }, + "OpenAIAssistantMessageParam": { + "type": "object", + "properties": { + "role": { + "type": "string", + "const": "assistant", + "default": "assistant", + "description": "Must be \"assistant\" to identify this as the model's response" + }, + "content": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChatCompletionContentPartTextParam" + } + } + ], + "description": "The content of the model's response" + }, + "name": { + "type": "string", + "description": "(Optional) The name of the assistant message participant." + }, + "tool_calls": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChatCompletionToolCall" + }, + "description": "List of tool calls. Each tool call is an OpenAIChatCompletionToolCall object." + } + }, + "additionalProperties": false, + "required": [ + "role" + ], + "title": "OpenAIAssistantMessageParam", + "description": "A message containing the model's (assistant) response in an OpenAI-compatible chat completion request." + }, + "OpenAIChatCompletionContentPartImageParam": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "image_url", + "default": "image_url", + "description": "Must be \"image_url\" to identify this as image content" + }, + "image_url": { + "$ref": "#/components/schemas/OpenAIImageURL", + "description": "Image URL specification and processing details" + } + }, + "additionalProperties": false, + "required": [ + "type", + "image_url" + ], + "title": "OpenAIChatCompletionContentPartImageParam", + "description": "Image content part for OpenAI-compatible chat completion messages." 
+ }, + "OpenAIChatCompletionContentPartParam": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIChatCompletionContentPartTextParam" + }, + { + "$ref": "#/components/schemas/OpenAIChatCompletionContentPartImageParam" + }, + { + "$ref": "#/components/schemas/OpenAIFile" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "text": "#/components/schemas/OpenAIChatCompletionContentPartTextParam", + "image_url": "#/components/schemas/OpenAIChatCompletionContentPartImageParam", + "file": "#/components/schemas/OpenAIFile" + } + } + }, + "OpenAIChatCompletionContentPartTextParam": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "text", + "default": "text", + "description": "Must be \"text\" to identify this as text content" + }, + "text": { + "type": "string", + "description": "The text content of the message" + } + }, + "additionalProperties": false, + "required": [ + "type", + "text" + ], + "title": "OpenAIChatCompletionContentPartTextParam", + "description": "Text content part for OpenAI-compatible chat completion messages." + }, + "OpenAIChatCompletionToolCall": { + "type": "object", + "properties": { + "index": { + "type": "integer", + "description": "(Optional) Index of the tool call in the list" + }, + "id": { + "type": "string", + "description": "(Optional) Unique identifier for the tool call" + }, + "type": { + "type": "string", + "const": "function", + "default": "function", + "description": "Must be \"function\" to identify this as a function call" + }, + "function": { + "$ref": "#/components/schemas/OpenAIChatCompletionToolCallFunction", + "description": "(Optional) Function call details" + } + }, + "additionalProperties": false, + "required": [ + "type" + ], + "title": "OpenAIChatCompletionToolCall", + "description": "Tool call specification for OpenAI-compatible chat completion responses." 
+ }, + "OpenAIChatCompletionToolCallFunction": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "(Optional) Name of the function to call" + }, + "arguments": { + "type": "string", + "description": "(Optional) Arguments to pass to the function as a JSON string" + } + }, + "additionalProperties": false, + "title": "OpenAIChatCompletionToolCallFunction", + "description": "Function call details for OpenAI-compatible tool calls." + }, + "OpenAIChoice": { + "type": "object", + "properties": { + "message": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIUserMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAISystemMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAIAssistantMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAIToolMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAIDeveloperMessageParam" + } + ], + "discriminator": { + "propertyName": "role", + "mapping": { + "user": "#/components/schemas/OpenAIUserMessageParam", + "system": "#/components/schemas/OpenAISystemMessageParam", + "assistant": "#/components/schemas/OpenAIAssistantMessageParam", + "tool": "#/components/schemas/OpenAIToolMessageParam", + "developer": "#/components/schemas/OpenAIDeveloperMessageParam" + } + }, + "description": "The message from the model" + }, + "finish_reason": { + "type": "string", + "description": "The reason the model stopped generating" + }, + "index": { + "type": "integer", + "description": "The index of the choice" + }, + "logprobs": { + "$ref": "#/components/schemas/OpenAIChoiceLogprobs", + "description": "(Optional) The log probabilities for the tokens in the message" + } + }, + "additionalProperties": false, + "required": [ + "message", + "finish_reason", + "index" + ], + "title": "OpenAIChoice", + "description": "A choice from an OpenAI-compatible chat completion response." 
+ }, + "OpenAIChoiceLogprobs": { + "type": "object", + "properties": { + "content": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAITokenLogProb" + }, + "description": "(Optional) The log probabilities for the tokens in the message" + }, + "refusal": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAITokenLogProb" + }, + "description": "(Optional) The log probabilities for the tokens in the message" + } + }, + "additionalProperties": false, + "title": "OpenAIChoiceLogprobs", + "description": "The log probabilities for the tokens in the message from an OpenAI-compatible chat completion response." + }, + "OpenAIDeveloperMessageParam": { + "type": "object", + "properties": { + "role": { + "type": "string", + "const": "developer", + "default": "developer", + "description": "Must be \"developer\" to identify this as a developer message" + }, + "content": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChatCompletionContentPartTextParam" + } + } + ], + "description": "The content of the developer message" + }, + "name": { + "type": "string", + "description": "(Optional) The name of the developer message participant." + } + }, + "additionalProperties": false, + "required": [ + "role", + "content" + ], + "title": "OpenAIDeveloperMessageParam", + "description": "A message from the developer in an OpenAI-compatible chat completion request." 
+ }, + "OpenAIFile": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "file", + "default": "file" + }, + "file": { + "$ref": "#/components/schemas/OpenAIFileFile" + } + }, + "additionalProperties": false, + "required": [ + "type", + "file" + ], + "title": "OpenAIFile" + }, + "OpenAIFileFile": { + "type": "object", + "properties": { + "file_data": { + "type": "string" + }, + "file_id": { + "type": "string" + }, + "filename": { + "type": "string" + } + }, + "additionalProperties": false, + "title": "OpenAIFileFile" + }, + "OpenAIImageURL": { + "type": "object", + "properties": { + "url": { + "type": "string", + "description": "URL of the image to include in the message" + }, + "detail": { + "type": "string", + "description": "(Optional) Level of detail for image processing. Can be \"low\", \"high\", or \"auto\"" + } + }, + "additionalProperties": false, + "required": [ + "url" + ], + "title": "OpenAIImageURL", + "description": "Image URL specification for OpenAI-compatible chat completion messages." 
+ }, + "OpenAIMessageParam": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIUserMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAISystemMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAIAssistantMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAIToolMessageParam" + }, + { + "$ref": "#/components/schemas/OpenAIDeveloperMessageParam" + } + ], + "discriminator": { + "propertyName": "role", + "mapping": { + "user": "#/components/schemas/OpenAIUserMessageParam", + "system": "#/components/schemas/OpenAISystemMessageParam", + "assistant": "#/components/schemas/OpenAIAssistantMessageParam", + "tool": "#/components/schemas/OpenAIToolMessageParam", + "developer": "#/components/schemas/OpenAIDeveloperMessageParam" + } + } + }, + "OpenAISystemMessageParam": { + "type": "object", + "properties": { + "role": { + "type": "string", + "const": "system", + "default": "system", + "description": "Must be \"system\" to identify this as a system message" + }, + "content": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChatCompletionContentPartTextParam" + } + } + ], + "description": "The content of the \"system prompt\". If multiple system messages are provided, they are concatenated. The underlying Llama Stack code may also add other system messages (for example, for formatting tool definitions)." + }, + "name": { + "type": "string", + "description": "(Optional) The name of the system message participant." + } + }, + "additionalProperties": false, + "required": [ + "role", + "content" + ], + "title": "OpenAISystemMessageParam", + "description": "A system message providing instructions or context to the model." 
+ }, + "OpenAITokenLogProb": { + "type": "object", + "properties": { + "token": { + "type": "string" + }, + "bytes": { + "type": "array", + "items": { + "type": "integer" + } + }, + "logprob": { + "type": "number" + }, + "top_logprobs": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAITopLogProb" + } + } + }, + "additionalProperties": false, + "required": [ + "token", + "logprob", + "top_logprobs" + ], + "title": "OpenAITokenLogProb", + "description": "The log probability for a token from an OpenAI-compatible chat completion response." + }, + "OpenAIToolMessageParam": { + "type": "object", + "properties": { + "role": { + "type": "string", + "const": "tool", + "default": "tool", + "description": "Must be \"tool\" to identify this as a tool response" + }, + "tool_call_id": { + "type": "string", + "description": "Unique identifier for the tool call this response is for" + }, + "content": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChatCompletionContentPartTextParam" + } + } + ], + "description": "The response content from the tool" + } + }, + "additionalProperties": false, + "required": [ + "role", + "tool_call_id", + "content" + ], + "title": "OpenAIToolMessageParam", + "description": "A message representing the result of a tool invocation in an OpenAI-compatible chat completion request." + }, + "OpenAITopLogProb": { + "type": "object", + "properties": { + "token": { + "type": "string" + }, + "bytes": { + "type": "array", + "items": { + "type": "integer" + } + }, + "logprob": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "token", + "logprob" + ], + "title": "OpenAITopLogProb", + "description": "The top log probability for a token from an OpenAI-compatible chat completion response." 
+ }, + "OpenAIUserMessageParam": { + "type": "object", + "properties": { + "role": { + "type": "string", + "const": "user", + "default": "user", + "description": "Must be \"user\" to identify this as a user message" + }, + "content": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChatCompletionContentPartParam" + } + } + ], + "description": "The content of the message, which can include text and other media" + }, + "name": { + "type": "string", + "description": "(Optional) The name of the user message participant." + } + }, + "additionalProperties": false, + "required": [ + "role", + "content" + ], + "title": "OpenAIUserMessageParam", + "description": "A message from the user in an OpenAI-compatible chat completion request." + }, + "OpenAIJSONSchema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Name of the schema" + }, + "description": { + "type": "string", + "description": "(Optional) Description of the schema" + }, + "strict": { + "type": "boolean", + "description": "(Optional) Whether to enforce strict adherence to the schema" + }, + "schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) The JSON schema definition" + } + }, + "additionalProperties": false, + "required": [ + "name" + ], + "title": "OpenAIJSONSchema", + "description": "JSON schema specification for OpenAI-compatible structured response format." 
+ }, + "OpenAIResponseFormatJSONObject": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "json_object", + "default": "json_object", + "description": "Must be \"json_object\" to indicate generic JSON object response format" + } + }, + "additionalProperties": false, + "required": [ + "type" + ], + "title": "OpenAIResponseFormatJSONObject", + "description": "JSON object response format for OpenAI-compatible chat completion requests." + }, + "OpenAIResponseFormatJSONSchema": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "json_schema", + "default": "json_schema", + "description": "Must be \"json_schema\" to indicate structured JSON response format" + }, + "json_schema": { + "$ref": "#/components/schemas/OpenAIJSONSchema", + "description": "The JSON schema specification for the response" + } + }, + "additionalProperties": false, + "required": [ + "type", + "json_schema" + ], + "title": "OpenAIResponseFormatJSONSchema", + "description": "JSON schema response format for OpenAI-compatible chat completion requests." 
+ }, + "OpenAIResponseFormatParam": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseFormatText" + }, + { + "$ref": "#/components/schemas/OpenAIResponseFormatJSONSchema" + }, + { + "$ref": "#/components/schemas/OpenAIResponseFormatJSONObject" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "text": "#/components/schemas/OpenAIResponseFormatText", + "json_schema": "#/components/schemas/OpenAIResponseFormatJSONSchema", + "json_object": "#/components/schemas/OpenAIResponseFormatJSONObject" + } + } + }, + "OpenAIResponseFormatText": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "text", + "default": "text", + "description": "Must be \"text\" to indicate plain text response format" + } + }, + "additionalProperties": false, + "required": [ + "type" + ], + "title": "OpenAIResponseFormatText", + "description": "Text response format for OpenAI-compatible chat completion requests." + }, + "OpenaiChatCompletionRequest": { + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The identifier of the model to use. The model must be registered with Llama Stack and available via the /models endpoint." + }, + "messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIMessageParam" + }, + "description": "List of messages in the conversation." + }, + "frequency_penalty": { + "type": "number", + "description": "(Optional) The penalty for repeated tokens." + }, + "function_call": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + } + } + ], + "description": "(Optional) The function call to use." 
+ }, + "functions": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + } + }, + "description": "(Optional) List of functions to use." + }, + "logit_bias": { + "type": "object", + "additionalProperties": { + "type": "number" + }, + "description": "(Optional) The logit bias to use." + }, + "logprobs": { + "type": "boolean", + "description": "(Optional) The log probabilities to use." + }, + "max_completion_tokens": { + "type": "integer", + "description": "(Optional) The maximum number of tokens to generate." + }, + "max_tokens": { + "type": "integer", + "description": "(Optional) The maximum number of tokens to generate." + }, + "n": { + "type": "integer", + "description": "(Optional) The number of completions to generate." + }, + "parallel_tool_calls": { + "type": "boolean", + "description": "(Optional) Whether to parallelize tool calls." + }, + "presence_penalty": { + "type": "number", + "description": "(Optional) The penalty for repeated tokens." + }, + "response_format": { + "$ref": "#/components/schemas/OpenAIResponseFormatParam", + "description": "(Optional) The response format to use." + }, + "seed": { + "type": "integer", + "description": "(Optional) The seed to use." + }, + "stop": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ], + "description": "(Optional) The stop tokens to use." + }, + "stream": { + "type": "boolean", + "description": "(Optional) Whether to stream the response." 
+ }, + "stream_options": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) The stream options to use." + }, + "temperature": { + "type": "number", + "description": "(Optional) The temperature to use." + }, + "tool_choice": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + } + } + ], + "description": "(Optional) The tool choice to use." + }, + "tools": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + } + }, + "description": "(Optional) The tools to use." + }, + "top_logprobs": { + "type": "integer", + "description": "(Optional) The top log probabilities to use." + }, + "top_p": { + "type": "number", + "description": "(Optional) The top p to use." + }, + "user": { + "type": "string", + "description": "(Optional) The user to use." 
+ } + }, + "additionalProperties": false, + "required": [ + "model", + "messages" + ], + "title": "OpenaiChatCompletionRequest" + }, + "OpenAIChatCompletion": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The ID of the chat completion" + }, + "choices": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChoice" + }, + "description": "List of choices" + }, + "object": { + "type": "string", + "const": "chat.completion", + "default": "chat.completion", + "description": "The object type, which will be \"chat.completion\"" + }, + "created": { + "type": "integer", + "description": "The Unix timestamp in seconds when the chat completion was created" + }, + "model": { + "type": "string", + "description": "The model that was used to generate the chat completion" + } + }, + "additionalProperties": false, + "required": [ + "id", + "choices", + "object", + "created", + "model" + ], + "title": "OpenAIChatCompletion", + "description": "Response from an OpenAI-compatible chat completion request." 
+ }, + "OpenAIChatCompletionChunk": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The ID of the chat completion" + }, + "choices": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChunkChoice" + }, + "description": "List of choices" + }, + "object": { + "type": "string", + "const": "chat.completion.chunk", + "default": "chat.completion.chunk", + "description": "The object type, which will be \"chat.completion.chunk\"" + }, + "created": { + "type": "integer", + "description": "The Unix timestamp in seconds when the chat completion was created" + }, + "model": { + "type": "string", + "description": "The model that was used to generate the chat completion" + } + }, + "additionalProperties": false, + "required": [ + "id", + "choices", + "object", + "created", + "model" + ], + "title": "OpenAIChatCompletionChunk", + "description": "Chunk from a streaming response to an OpenAI-compatible chat completion request." + }, + "OpenAIChoiceDelta": { + "type": "object", + "properties": { + "content": { + "type": "string", + "description": "(Optional) The content of the delta" + }, + "refusal": { + "type": "string", + "description": "(Optional) The refusal of the delta" + }, + "role": { + "type": "string", + "description": "(Optional) The role of the delta" + }, + "tool_calls": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChatCompletionToolCall" + }, + "description": "(Optional) The tool calls of the delta" + } + }, + "additionalProperties": false, + "title": "OpenAIChoiceDelta", + "description": "A delta from an OpenAI-compatible chat completion streaming response." 
+ }, + "OpenAIChunkChoice": { + "type": "object", + "properties": { + "delta": { + "$ref": "#/components/schemas/OpenAIChoiceDelta", + "description": "The delta from the chunk" + }, + "finish_reason": { + "type": "string", + "description": "The reason the model stopped generating" + }, + "index": { + "type": "integer", + "description": "The index of the choice" + }, + "logprobs": { + "$ref": "#/components/schemas/OpenAIChoiceLogprobs", + "description": "(Optional) The log probabilities for the tokens in the message" + } + }, + "additionalProperties": false, + "required": [ + "delta", + "finish_reason", + "index" + ], + "title": "OpenAIChunkChoice", + "description": "A chunk choice from an OpenAI-compatible chat completion streaming response." + }, + "OpenAICompletionWithInputMessages": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The ID of the chat completion" + }, + "choices": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIChoice" + }, + "description": "List of choices" + }, + "object": { + "type": "string", + "const": "chat.completion", + "default": "chat.completion", + "description": "The object type, which will be \"chat.completion\"" + }, + "created": { + "type": "integer", + "description": "The Unix timestamp in seconds when the chat completion was created" + }, + "model": { + "type": "string", + "description": "The model that was used to generate the chat completion" + }, + "input_messages": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIMessageParam" + } + } + }, + "additionalProperties": false, + "required": [ + "id", + "choices", + "object", + "created", + "model", + "input_messages" + ], + "title": "OpenAICompletionWithInputMessages" + }, + "OpenaiCompletionRequest": { + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The identifier of the model to use. 
The model must be registered with Llama Stack and available via the /models endpoint." + }, + "prompt": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "array", + "items": { + "type": "integer" + } + }, + { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "integer" + } + } + } + ], + "description": "The prompt to generate a completion for." + }, + "best_of": { + "type": "integer", + "description": "(Optional) The number of completions to generate." + }, + "echo": { + "type": "boolean", + "description": "(Optional) Whether to echo the prompt." + }, + "frequency_penalty": { + "type": "number", + "description": "(Optional) The penalty for repeated tokens." + }, + "logit_bias": { + "type": "object", + "additionalProperties": { + "type": "number" + }, + "description": "(Optional) The logit bias to use." + }, + "logprobs": { + "type": "boolean", + "description": "(Optional) The log probabilities to use." + }, + "max_tokens": { + "type": "integer", + "description": "(Optional) The maximum number of tokens to generate." + }, + "n": { + "type": "integer", + "description": "(Optional) The number of completions to generate." + }, + "presence_penalty": { + "type": "number", + "description": "(Optional) The penalty for repeated tokens." + }, + "seed": { + "type": "integer", + "description": "(Optional) The seed to use." + }, + "stop": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ], + "description": "(Optional) The stop tokens to use." + }, + "stream": { + "type": "boolean", + "description": "(Optional) Whether to stream the response." 
+ }, + "stream_options": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) The stream options to use." + }, + "temperature": { + "type": "number", + "description": "(Optional) The temperature to use." + }, + "top_p": { + "type": "number", + "description": "(Optional) The top p to use." + }, + "user": { + "type": "string", + "description": "(Optional) The user to use." + }, + "guided_choice": { + "type": "array", + "items": { + "type": "string" + } + }, + "prompt_logprobs": { + "type": "integer" + }, + "suffix": { + "type": "string", + "description": "(Optional) The suffix that should be appended to the completion." + } + }, + "additionalProperties": false, + "required": [ + "model", + "prompt" + ], + "title": "OpenaiCompletionRequest" + }, + "OpenAICompletion": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "choices": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAICompletionChoice" + } + }, + "created": { + "type": "integer" + }, + "model": { + "type": "string" + }, + "object": { + "type": "string", + "const": "text_completion", + "default": "text_completion" + } + }, + "additionalProperties": false, + "required": [ + "id", + "choices", + "created", + "model", + "object" + ], + "title": "OpenAICompletion", + "description": "Response from an OpenAI-compatible completion request." 
+ }, + "OpenAICompletionChoice": { + "type": "object", + "properties": { + "finish_reason": { + "type": "string" + }, + "text": { + "type": "string" + }, + "index": { + "type": "integer" + }, + "logprobs": { + "$ref": "#/components/schemas/OpenAIChoiceLogprobs" + } + }, + "additionalProperties": false, + "required": [ + "finish_reason", + "text", + "index" + ], + "title": "OpenAICompletionChoice", + "description": "A choice from an OpenAI-compatible completion response." + }, + "OpenaiEmbeddingsRequest": { + "type": "object", + "properties": { + "model": { + "type": "string", + "description": "The identifier of the model to use. The model must be an embedding model registered with Llama Stack and available via the /models endpoint." + }, + "input": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ], + "description": "Input text to embed, encoded as a string or array of strings. To embed multiple inputs in a single request, pass an array of strings." + }, + "encoding_format": { + "type": "string", + "description": "(Optional) The format to return the embeddings in. Can be either \"float\" or \"base64\". Defaults to \"float\"." + }, + "dimensions": { + "type": "integer", + "description": "(Optional) The number of dimensions the resulting output embeddings should have. Only supported in text-embedding-3 and later models." + }, + "user": { + "type": "string", + "description": "(Optional) A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse." 
+ } + }, + "additionalProperties": false, + "required": [ + "model", + "input" + ], + "title": "OpenaiEmbeddingsRequest" + }, + "OpenAIEmbeddingData": { + "type": "object", + "properties": { + "object": { + "type": "string", + "const": "embedding", + "default": "embedding", + "description": "The object type, which will be \"embedding\"" + }, + "embedding": { + "oneOf": [ + { + "type": "array", + "items": { + "type": "number" + } + }, + { + "type": "string" + } + ], + "description": "The embedding vector as a list of floats (when encoding_format=\"float\") or as a base64-encoded string (when encoding_format=\"base64\")" + }, + "index": { + "type": "integer", + "description": "The index of the embedding in the input list" + } + }, + "additionalProperties": false, + "required": [ + "object", + "embedding", + "index" + ], + "title": "OpenAIEmbeddingData", + "description": "A single embedding data object from an OpenAI-compatible embeddings response." + }, + "OpenAIEmbeddingUsage": { + "type": "object", + "properties": { + "prompt_tokens": { + "type": "integer", + "description": "The number of tokens in the input" + }, + "total_tokens": { + "type": "integer", + "description": "The total number of tokens used" + } + }, + "additionalProperties": false, + "required": [ + "prompt_tokens", + "total_tokens" + ], + "title": "OpenAIEmbeddingUsage", + "description": "Usage information for an OpenAI-compatible embeddings response." 
+ }, + "OpenAIEmbeddingsResponse": { + "type": "object", + "properties": { + "object": { + "type": "string", + "const": "list", + "default": "list", + "description": "The object type, which will be \"list\"" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIEmbeddingData" + }, + "description": "List of embedding data objects" + }, + "model": { + "type": "string", + "description": "The model that was used to generate the embeddings" + }, + "usage": { + "$ref": "#/components/schemas/OpenAIEmbeddingUsage", + "description": "Usage information" + } + }, + "additionalProperties": false, + "required": [ + "object", + "data", + "model", + "usage" + ], + "title": "OpenAIEmbeddingsResponse", + "description": "Response from an OpenAI-compatible embeddings request." + }, + "OpenAIFilePurpose": { + "type": "string", + "enum": [ + "assistants", + "batch" + ], + "title": "OpenAIFilePurpose", + "description": "Valid purpose values for OpenAI Files API." + }, + "ListOpenAIFileResponse": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIFileObject" + }, + "description": "List of file objects" + }, + "has_more": { + "type": "boolean", + "description": "Whether there are more files available beyond this page" + }, + "first_id": { + "type": "string", + "description": "ID of the first file in the list for pagination" + }, + "last_id": { + "type": "string", + "description": "ID of the last file in the list for pagination" + }, + "object": { + "type": "string", + "const": "list", + "default": "list", + "description": "The object type, which is always \"list\"" + } + }, + "additionalProperties": false, + "required": [ + "data", + "has_more", + "first_id", + "last_id", + "object" + ], + "title": "ListOpenAIFileResponse", + "description": "Response for listing files in OpenAI Files API." 
+ }, + "OpenAIFileObject": { + "type": "object", + "properties": { + "object": { + "type": "string", + "const": "file", + "default": "file", + "description": "The object type, which is always \"file\"" + }, + "id": { + "type": "string", + "description": "The file identifier, which can be referenced in the API endpoints" + }, + "bytes": { + "type": "integer", + "description": "The size of the file, in bytes" + }, + "created_at": { + "type": "integer", + "description": "The Unix timestamp (in seconds) for when the file was created" + }, + "expires_at": { + "type": "integer", + "description": "The Unix timestamp (in seconds) for when the file expires" + }, + "filename": { + "type": "string", + "description": "The name of the file" + }, + "purpose": { + "type": "string", + "enum": [ + "assistants", + "batch" + ], + "description": "The intended purpose of the file" + } + }, + "additionalProperties": false, + "required": [ + "object", + "id", + "bytes", + "created_at", + "expires_at", + "filename", + "purpose" + ], + "title": "OpenAIFileObject", + "description": "OpenAI File object as defined in the OpenAI Files API." 
+ }, + "ExpiresAfter": { + "type": "object", + "properties": { + "anchor": { + "type": "string", + "const": "created_at" + }, + "seconds": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "anchor", + "seconds" + ], + "title": "ExpiresAfter", + "description": "Control expiration of uploaded files.\nParams:\n - anchor, must be \"created_at\"\n - seconds, must be int between 3600 and 2592000 (1 hour to 30 days)" + }, + "OpenAIFileDeleteResponse": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The file identifier that was deleted" + }, + "object": { + "type": "string", + "const": "file", + "default": "file", + "description": "The object type, which is always \"file\"" + }, + "deleted": { + "type": "boolean", + "description": "Whether the file was successfully deleted" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "deleted" + ], + "title": "OpenAIFileDeleteResponse", + "description": "Response for deleting a file in OpenAI Files API." + }, + "Response": { + "type": "object", + "title": "Response" + }, + "OpenAIModel": { + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "object": { + "type": "string", + "const": "model", + "default": "model" + }, + "created": { + "type": "integer" + }, + "owned_by": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "created", + "owned_by" + ], + "title": "OpenAIModel", + "description": "A model from OpenAI." 
+ }, + "OpenAIListModelsResponse": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIModel" + } + } + }, + "additionalProperties": false, + "required": [ + "data" + ], + "title": "OpenAIListModelsResponse" + }, + "RunModerationRequest": { + "type": "object", + "properties": { + "input": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ], + "description": "Input (or inputs) to classify. Can be a single string, an array of strings, or an array of multi-modal input objects similar to other models." + }, + "model": { + "type": "string", + "description": "The content moderation model you would like to use." + } + }, + "additionalProperties": false, + "required": [ + "input", + "model" + ], + "title": "RunModerationRequest" + }, + "ModerationObject": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The unique identifier for the moderation request." + }, + "model": { + "type": "string", + "description": "The model used to generate the moderation results." + }, + "results": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ModerationObjectResults" + }, + "description": "A list of moderation objects" + } + }, + "additionalProperties": false, + "required": [ + "id", + "model", + "results" + ], + "title": "ModerationObject", + "description": "A moderation object." + }, + "ModerationObjectResults": { + "type": "object", + "properties": { + "flagged": { + "type": "boolean", + "description": "Whether any of the below categories are flagged." + }, + "categories": { + "type": "object", + "additionalProperties": { + "type": "boolean" + }, + "description": "A list of the categories, and whether they are flagged or not." 
+ }, + "category_applied_input_types": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": "A list of the categories along with the input type(s) that the score applies to." + }, + "category_scores": { + "type": "object", + "additionalProperties": { + "type": "number" + }, + "description": "A list of the categories along with their scores as predicted by model." + }, + "user_message": { + "type": "string" + }, + "metadata": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + } + } + }, + "additionalProperties": false, + "required": [ + "flagged", + "metadata" + ], + "title": "ModerationObjectResults", + "description": "A moderation object." + }, + "ListOpenAIResponseObject": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseObjectWithInput" + }, + "description": "List of response objects with their input context" + }, + "has_more": { + "type": "boolean", + "description": "Whether there are more results available beyond this page" + }, + "first_id": { + "type": "string", + "description": "Identifier of the first item in this page" + }, + "last_id": { + "type": "string", + "description": "Identifier of the last item in this page" + }, + "object": { + "type": "string", + "const": "list", + "default": "list", + "description": "Object type identifier, always \"list\"" + } + }, + "additionalProperties": false, + "required": [ + "data", + "has_more", + "first_id", + "last_id", + "object" + ], + "title": "ListOpenAIResponseObject", + "description": "Paginated list of OpenAI response objects with navigation metadata." 
+ }, + "OpenAIResponseAnnotationCitation": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "url_citation", + "default": "url_citation", + "description": "Annotation type identifier, always \"url_citation\"" + }, + "end_index": { + "type": "integer", + "description": "End position of the citation span in the content" + }, + "start_index": { + "type": "integer", + "description": "Start position of the citation span in the content" + }, + "title": { + "type": "string", + "description": "Title of the referenced web resource" + }, + "url": { + "type": "string", + "description": "URL of the referenced web resource" + } + }, + "additionalProperties": false, + "required": [ + "type", + "end_index", + "start_index", + "title", + "url" + ], + "title": "OpenAIResponseAnnotationCitation", + "description": "URL citation annotation for referencing external web resources." + }, + "OpenAIResponseAnnotationContainerFileCitation": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "container_file_citation", + "default": "container_file_citation" + }, + "container_id": { + "type": "string" + }, + "end_index": { + "type": "integer" + }, + "file_id": { + "type": "string" + }, + "filename": { + "type": "string" + }, + "start_index": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "type", + "container_id", + "end_index", + "file_id", + "filename", + "start_index" + ], + "title": "OpenAIResponseAnnotationContainerFileCitation" + }, + "OpenAIResponseAnnotationFileCitation": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "file_citation", + "default": "file_citation", + "description": "Annotation type identifier, always \"file_citation\"" + }, + "file_id": { + "type": "string", + "description": "Unique identifier of the referenced file" + }, + "filename": { + "type": "string", + "description": "Name of the referenced file" + }, + "index": { + "type": 
"integer", + "description": "Position index of the citation within the content" + } + }, + "additionalProperties": false, + "required": [ + "type", + "file_id", + "filename", + "index" + ], + "title": "OpenAIResponseAnnotationFileCitation", + "description": "File citation annotation for referencing specific files in response content." + }, + "OpenAIResponseAnnotationFilePath": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "file_path", + "default": "file_path" + }, + "file_id": { + "type": "string" + }, + "index": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "type", + "file_id", + "index" + ], + "title": "OpenAIResponseAnnotationFilePath" + }, + "OpenAIResponseAnnotations": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseAnnotationFileCitation" + }, + { + "$ref": "#/components/schemas/OpenAIResponseAnnotationCitation" + }, + { + "$ref": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation" + }, + { + "$ref": "#/components/schemas/OpenAIResponseAnnotationFilePath" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "file_citation": "#/components/schemas/OpenAIResponseAnnotationFileCitation", + "url_citation": "#/components/schemas/OpenAIResponseAnnotationCitation", + "container_file_citation": "#/components/schemas/OpenAIResponseAnnotationContainerFileCitation", + "file_path": "#/components/schemas/OpenAIResponseAnnotationFilePath" + } + } + }, + "OpenAIResponseError": { + "type": "object", + "properties": { + "code": { + "type": "string", + "description": "Error code identifying the type of failure" + }, + "message": { + "type": "string", + "description": "Human-readable error message describing the failure" + } + }, + "additionalProperties": false, + "required": [ + "code", + "message" + ], + "title": "OpenAIResponseError", + "description": "Error details for failed OpenAI response requests." 
+ }, + "OpenAIResponseInput": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseInputFunctionToolCallOutput" + }, + { + "$ref": "#/components/schemas/OpenAIResponseMCPApprovalRequest" + }, + { + "$ref": "#/components/schemas/OpenAIResponseMCPApprovalResponse" + }, + { + "$ref": "#/components/schemas/OpenAIResponseMessage" + } + ] + }, + "OpenAIResponseInputFunctionToolCallOutput": { + "type": "object", + "properties": { + "call_id": { + "type": "string" + }, + "output": { + "type": "string" + }, + "type": { + "type": "string", + "const": "function_call_output", + "default": "function_call_output" + }, + "id": { + "type": "string" + }, + "status": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "call_id", + "output", + "type" + ], + "title": "OpenAIResponseInputFunctionToolCallOutput", + "description": "This represents the output of a function call that gets passed back to the model." 
+ }, + "OpenAIResponseInputMessageContent": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseInputMessageContentText" + }, + { + "$ref": "#/components/schemas/OpenAIResponseInputMessageContentImage" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "input_text": "#/components/schemas/OpenAIResponseInputMessageContentText", + "input_image": "#/components/schemas/OpenAIResponseInputMessageContentImage" + } + } + }, + "OpenAIResponseInputMessageContentImage": { + "type": "object", + "properties": { + "detail": { + "oneOf": [ + { + "type": "string", + "const": "low" + }, + { + "type": "string", + "const": "high" + }, + { + "type": "string", + "const": "auto" + } + ], + "default": "auto", + "description": "Level of detail for image processing, can be \"low\", \"high\", or \"auto\"" + }, + "type": { + "type": "string", + "const": "input_image", + "default": "input_image", + "description": "Content type identifier, always \"input_image\"" + }, + "image_url": { + "type": "string", + "description": "(Optional) URL of the image content" + } + }, + "additionalProperties": false, + "required": [ + "detail", + "type" + ], + "title": "OpenAIResponseInputMessageContentImage", + "description": "Image content for input messages in OpenAI response format." + }, + "OpenAIResponseInputMessageContentText": { + "type": "object", + "properties": { + "text": { + "type": "string", + "description": "The text content of the input message" + }, + "type": { + "type": "string", + "const": "input_text", + "default": "input_text", + "description": "Content type identifier, always \"input_text\"" + } + }, + "additionalProperties": false, + "required": [ + "text", + "type" + ], + "title": "OpenAIResponseInputMessageContentText", + "description": "Text content for input messages in OpenAI response format." 
+ }, + "OpenAIResponseMCPApprovalRequest": { + "type": "object", + "properties": { + "arguments": { + "type": "string" + }, + "id": { + "type": "string" + }, + "name": { + "type": "string" + }, + "server_label": { + "type": "string" + }, + "type": { + "type": "string", + "const": "mcp_approval_request", + "default": "mcp_approval_request" + } + }, + "additionalProperties": false, + "required": [ + "arguments", + "id", + "name", + "server_label", + "type" + ], + "title": "OpenAIResponseMCPApprovalRequest", + "description": "A request for human approval of a tool invocation." + }, + "OpenAIResponseMCPApprovalResponse": { + "type": "object", + "properties": { + "approval_request_id": { + "type": "string" + }, + "approve": { + "type": "boolean" + }, + "type": { + "type": "string", + "const": "mcp_approval_response", + "default": "mcp_approval_response" + }, + "id": { + "type": "string" + }, + "reason": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "approval_request_id", + "approve", + "type" + ], + "title": "OpenAIResponseMCPApprovalResponse", + "description": "A response to an MCP approval request." 
+ }, + "OpenAIResponseMessage": { + "type": "object", + "properties": { + "content": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseInputMessageContent" + } + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageContent" + } + } + ] + }, + "role": { + "oneOf": [ + { + "type": "string", + "const": "system" + }, + { + "type": "string", + "const": "developer" + }, + { + "type": "string", + "const": "user" + }, + { + "type": "string", + "const": "assistant" + } + ] + }, + "type": { + "type": "string", + "const": "message", + "default": "message" + }, + "id": { + "type": "string" + }, + "status": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "content", + "role", + "type" + ], + "title": "OpenAIResponseMessage", + "description": "Corresponds to the various Message types in the Responses API. They are all under one type because the Responses API gives them all the same \"type\" value, and there is no way to tell them apart in certain scenarios." 
+ }, + "OpenAIResponseObjectWithInput": { + "type": "object", + "properties": { + "created_at": { + "type": "integer", + "description": "Unix timestamp when the response was created" + }, + "error": { + "$ref": "#/components/schemas/OpenAIResponseError", + "description": "(Optional) Error details if the response generation failed" + }, + "id": { + "type": "string", + "description": "Unique identifier for this response" + }, + "model": { + "type": "string", + "description": "Model identifier used for generation" + }, + "object": { + "type": "string", + "const": "response", + "default": "response", + "description": "Object type identifier, always \"response\"" + }, + "output": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseOutput" + }, + "description": "List of generated output items (messages, tool calls, etc.)" + }, + "parallel_tool_calls": { + "type": "boolean", + "default": false, + "description": "Whether tool calls can be executed in parallel" + }, + "previous_response_id": { + "type": "string", + "description": "(Optional) ID of the previous response in a conversation" + }, + "status": { + "type": "string", + "description": "Current status of the response generation" + }, + "temperature": { + "type": "number", + "description": "(Optional) Sampling temperature used for generation" + }, + "text": { + "$ref": "#/components/schemas/OpenAIResponseText", + "description": "Text formatting configuration for the response" + }, + "top_p": { + "type": "number", + "description": "(Optional) Nucleus sampling parameter used for generation" + }, + "truncation": { + "type": "string", + "description": "(Optional) Truncation strategy applied to the response" + }, + "input": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseInput" + }, + "description": "List of input items that led to this response" + } + }, + "additionalProperties": false, + "required": [ + "created_at", + "id", + "model", + "object", + "output", + 
"parallel_tool_calls", + "status", + "text", + "input" + ], + "title": "OpenAIResponseObjectWithInput", + "description": "OpenAI response object extended with input context information." + }, + "OpenAIResponseOutput": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseMessage" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools" + }, + { + "$ref": "#/components/schemas/OpenAIResponseMCPApprovalRequest" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "message": "#/components/schemas/OpenAIResponseMessage", + "web_search_call": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall", + "file_search_call": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall", + "function_call": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall", + "mcp_call": "#/components/schemas/OpenAIResponseOutputMessageMCPCall", + "mcp_list_tools": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools", + "mcp_approval_request": "#/components/schemas/OpenAIResponseMCPApprovalRequest" + } + } + }, + "OpenAIResponseOutputMessageContent": { + "type": "object", + "properties": { + "text": { + "type": "string" + }, + "type": { + "type": "string", + "const": "output_text", + "default": "output_text" + }, + "annotations": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseAnnotations" + } + } + }, + "additionalProperties": false, + "required": [ + "text", + "type", + "annotations" + ], + "title": "OpenAIResponseOutputMessageContentOutputText" + }, + "OpenAIResponseOutputMessageFileSearchToolCall": { + "type": "object", + 
"properties": { + "id": { + "type": "string", + "description": "Unique identifier for this tool call" + }, + "queries": { + "type": "array", + "items": { + "type": "string" + }, + "description": "List of search queries executed" + }, + "status": { + "type": "string", + "description": "Current status of the file search operation" + }, + "type": { + "type": "string", + "const": "file_search_call", + "default": "file_search_call", + "description": "Tool call type identifier, always \"file_search_call\"" + }, + "results": { + "type": "array", + "items": { + "type": "object", + "properties": { + "attributes": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) Key-value attributes associated with the file" + }, + "file_id": { + "type": "string", + "description": "Unique identifier of the file containing the result" + }, + "filename": { + "type": "string", + "description": "Name of the file containing the result" + }, + "score": { + "type": "number", + "description": "Relevance score for this search result (between 0 and 1)" + }, + "text": { + "type": "string", + "description": "Text content of the search result" + } + }, + "additionalProperties": false, + "required": [ + "attributes", + "file_id", + "filename", + "score", + "text" + ], + "title": "OpenAIResponseOutputMessageFileSearchToolCallResults", + "description": "Search results returned by the file search operation." + }, + "description": "(Optional) Search results returned by the file search operation" + } + }, + "additionalProperties": false, + "required": [ + "id", + "queries", + "status", + "type" + ], + "title": "OpenAIResponseOutputMessageFileSearchToolCall", + "description": "File search tool call output message for OpenAI responses." 
+ }, + "OpenAIResponseOutputMessageFunctionToolCall": { + "type": "object", + "properties": { + "call_id": { + "type": "string", + "description": "Unique identifier for the function call" + }, + "name": { + "type": "string", + "description": "Name of the function being called" + }, + "arguments": { + "type": "string", + "description": "JSON string containing the function arguments" + }, + "type": { + "type": "string", + "const": "function_call", + "default": "function_call", + "description": "Tool call type identifier, always \"function_call\"" + }, + "id": { + "type": "string", + "description": "(Optional) Additional identifier for the tool call" + }, + "status": { + "type": "string", + "description": "(Optional) Current status of the function call execution" + } + }, + "additionalProperties": false, + "required": [ + "call_id", + "name", + "arguments", + "type" + ], + "title": "OpenAIResponseOutputMessageFunctionToolCall", + "description": "Function tool call output message for OpenAI responses." 
+ }, + "OpenAIResponseOutputMessageMCPCall": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier for this MCP call" + }, + "type": { + "type": "string", + "const": "mcp_call", + "default": "mcp_call", + "description": "Tool call type identifier, always \"mcp_call\"" + }, + "arguments": { + "type": "string", + "description": "JSON string containing the MCP call arguments" + }, + "name": { + "type": "string", + "description": "Name of the MCP method being called" + }, + "server_label": { + "type": "string", + "description": "Label identifying the MCP server handling the call" + }, + "error": { + "type": "string", + "description": "(Optional) Error message if the MCP call failed" + }, + "output": { + "type": "string", + "description": "(Optional) Output result from the successful MCP call" + } + }, + "additionalProperties": false, + "required": [ + "id", + "type", + "arguments", + "name", + "server_label" + ], + "title": "OpenAIResponseOutputMessageMCPCall", + "description": "Model Context Protocol (MCP) call output message for OpenAI responses." 
+ }, + "OpenAIResponseOutputMessageMCPListTools": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier for this MCP list tools operation" + }, + "type": { + "type": "string", + "const": "mcp_list_tools", + "default": "mcp_list_tools", + "description": "Tool call type identifier, always \"mcp_list_tools\"" + }, + "server_label": { + "type": "string", + "description": "Label identifying the MCP server providing the tools" + }, + "tools": { + "type": "array", + "items": { + "type": "object", + "properties": { + "input_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "JSON schema defining the tool's input parameters" + }, + "name": { + "type": "string", + "description": "Name of the tool" + }, + "description": { + "type": "string", + "description": "(Optional) Description of what the tool does" + } + }, + "additionalProperties": false, + "required": [ + "input_schema", + "name" + ], + "title": "MCPListToolsTool", + "description": "Tool definition returned by MCP list tools operation." + }, + "description": "List of available tools provided by the MCP server" + } + }, + "additionalProperties": false, + "required": [ + "id", + "type", + "server_label", + "tools" + ], + "title": "OpenAIResponseOutputMessageMCPListTools", + "description": "MCP list tools output message containing available tools from an MCP server." 
+ }, + "OpenAIResponseOutputMessageWebSearchToolCall": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier for this tool call" + }, + "status": { + "type": "string", + "description": "Current status of the web search operation" + }, + "type": { + "type": "string", + "const": "web_search_call", + "default": "web_search_call", + "description": "Tool call type identifier, always \"web_search_call\"" + } + }, + "additionalProperties": false, + "required": [ + "id", + "status", + "type" + ], + "title": "OpenAIResponseOutputMessageWebSearchToolCall", + "description": "Web search tool call output message for OpenAI responses." + }, + "OpenAIResponseText": { + "type": "object", + "properties": { + "format": { + "type": "object", + "properties": { + "type": { + "oneOf": [ + { + "type": "string", + "const": "text" + }, + { + "type": "string", + "const": "json_schema" + }, + { + "type": "string", + "const": "json_object" + } + ], + "description": "Must be \"text\", \"json_schema\", or \"json_object\" to identify the format type" + }, + "name": { + "type": "string", + "description": "The name of the response format. Only used for json_schema." + }, + "schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "The JSON schema the response should conform to. In a Python SDK, this is often a `pydantic` model. Only used for json_schema." + }, + "description": { + "type": "string", + "description": "(Optional) A description of the response format. Only used for json_schema." + }, + "strict": { + "type": "boolean", + "description": "(Optional) Whether to strictly enforce the JSON schema. If true, the response must match the schema exactly. Only used for json_schema." 
+ } + }, + "additionalProperties": false, + "required": [ + "type" + ], + "description": "(Optional) Text format configuration specifying output format requirements" + } + }, + "additionalProperties": false, + "title": "OpenAIResponseText", + "description": "Text response configuration for OpenAI responses." + }, + "OpenAIResponseInputTool": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseInputToolWebSearch" + }, + { + "$ref": "#/components/schemas/OpenAIResponseInputToolFileSearch" + }, + { + "$ref": "#/components/schemas/OpenAIResponseInputToolFunction" + }, + { + "$ref": "#/components/schemas/OpenAIResponseInputToolMCP" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "web_search": "#/components/schemas/OpenAIResponseInputToolWebSearch", + "file_search": "#/components/schemas/OpenAIResponseInputToolFileSearch", + "function": "#/components/schemas/OpenAIResponseInputToolFunction", + "mcp": "#/components/schemas/OpenAIResponseInputToolMCP" + } + } + }, + "OpenAIResponseInputToolFileSearch": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "file_search", + "default": "file_search", + "description": "Tool type identifier, always \"file_search\"" + }, + "vector_store_ids": { + "type": "array", + "items": { + "type": "string" + }, + "description": "List of vector store identifiers to search within" + }, + "filters": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) Additional filters to apply to the search" + }, + "max_num_results": { + "type": "integer", + "default": 10, + "description": "(Optional) Maximum number of search results to return (1-50)" + }, + "ranking_options": { + "type": "object", + "properties": { + "ranker": { + "type": "string", + "description": "(Optional) Name 
of the ranking algorithm to use" + }, + "score_threshold": { + "type": "number", + "default": 0.0, + "description": "(Optional) Minimum relevance score threshold for results" + } + }, + "additionalProperties": false, + "description": "(Optional) Options for ranking and scoring search results" + } + }, + "additionalProperties": false, + "required": [ + "type", + "vector_store_ids" + ], + "title": "OpenAIResponseInputToolFileSearch", + "description": "File search tool configuration for OpenAI response inputs." + }, + "OpenAIResponseInputToolFunction": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "function", + "default": "function", + "description": "Tool type identifier, always \"function\"" + }, + "name": { + "type": "string", + "description": "Name of the function that can be called" + }, + "description": { + "type": "string", + "description": "(Optional) Description of what the function does" + }, + "parameters": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) JSON schema defining the function's parameters" + }, + "strict": { + "type": "boolean", + "description": "(Optional) Whether to enforce strict parameter validation" + } + }, + "additionalProperties": false, + "required": [ + "type", + "name" + ], + "title": "OpenAIResponseInputToolFunction", + "description": "Function tool configuration for OpenAI response inputs." 
+ }, + "OpenAIResponseInputToolMCP": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "mcp", + "default": "mcp", + "description": "Tool type identifier, always \"mcp\"" + }, + "server_label": { + "type": "string", + "description": "Label to identify this MCP server" + }, + "server_url": { + "type": "string", + "description": "URL endpoint of the MCP server" + }, + "headers": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) HTTP headers to include when connecting to the server" + }, + "require_approval": { + "oneOf": [ + { + "type": "string", + "const": "always" + }, + { + "type": "string", + "const": "never" + }, + { + "type": "object", + "properties": { + "always": { + "type": "array", + "items": { + "type": "string" + }, + "description": "(Optional) List of tool names that always require approval" + }, + "never": { + "type": "array", + "items": { + "type": "string" + }, + "description": "(Optional) List of tool names that never require approval" + } + }, + "additionalProperties": false, + "title": "ApprovalFilter", + "description": "Filter configuration for MCP tool approval requirements." + } + ], + "default": "never", + "description": "Approval requirement for tool calls (\"always\", \"never\", or filter)" + }, + "allowed_tools": { + "oneOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "object", + "properties": { + "tool_names": { + "type": "array", + "items": { + "type": "string" + }, + "description": "(Optional) List of specific tool names that are allowed" + } + }, + "additionalProperties": false, + "title": "AllowedToolsFilter", + "description": "Filter configuration for restricting which MCP tools can be used." 
+ } + ], + "description": "(Optional) Restriction on which tools can be used from this server" + } + }, + "additionalProperties": false, + "required": [ + "type", + "server_label", + "server_url", + "require_approval" + ], + "title": "OpenAIResponseInputToolMCP", + "description": "Model Context Protocol (MCP) tool configuration for OpenAI response inputs." + }, + "OpenAIResponseInputToolWebSearch": { + "type": "object", + "properties": { + "type": { + "oneOf": [ + { + "type": "string", + "const": "web_search" + }, + { + "type": "string", + "const": "web_search_preview" + }, + { + "type": "string", + "const": "web_search_preview_2025_03_11" + } + ], + "default": "web_search", + "description": "Web search tool type variant to use" + }, + "search_context_size": { + "type": "string", + "default": "medium", + "description": "(Optional) Size of search context, must be \"low\", \"medium\", or \"high\"" + } + }, + "additionalProperties": false, + "required": [ + "type" + ], + "title": "OpenAIResponseInputToolWebSearch", + "description": "Web search tool configuration for OpenAI response inputs." + }, + "CreateOpenaiResponseRequest": { + "type": "object", + "properties": { + "input": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseInput" + } + } + ], + "description": "Input message(s) to create the response." + }, + "model": { + "type": "string", + "description": "The underlying LLM used for completions." + }, + "instructions": { + "type": "string" + }, + "previous_response_id": { + "type": "string", + "description": "(Optional) if specified, the new response will be a continuation of the previous response. This can be used to easily fork-off new responses from existing responses." 
+ }, + "store": { + "type": "boolean" + }, + "stream": { + "type": "boolean" + }, + "temperature": { + "type": "number" + }, + "text": { + "$ref": "#/components/schemas/OpenAIResponseText" + }, + "tools": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseInputTool" + } + }, + "include": { + "type": "array", + "items": { + "type": "string" + }, + "description": "(Optional) Additional fields to include in the response." + }, + "max_infer_iters": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "input", + "model" + ], + "title": "CreateOpenaiResponseRequest" + }, + "OpenAIResponseObject": { + "type": "object", + "properties": { + "created_at": { + "type": "integer", + "description": "Unix timestamp when the response was created" + }, + "error": { + "$ref": "#/components/schemas/OpenAIResponseError", + "description": "(Optional) Error details if the response generation failed" + }, + "id": { + "type": "string", + "description": "Unique identifier for this response" + }, + "model": { + "type": "string", + "description": "Model identifier used for generation" + }, + "object": { + "type": "string", + "const": "response", + "default": "response", + "description": "Object type identifier, always \"response\"" + }, + "output": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseOutput" + }, + "description": "List of generated output items (messages, tool calls, etc.)" + }, + "parallel_tool_calls": { + "type": "boolean", + "default": false, + "description": "Whether tool calls can be executed in parallel" + }, + "previous_response_id": { + "type": "string", + "description": "(Optional) ID of the previous response in a conversation" + }, + "status": { + "type": "string", + "description": "Current status of the response generation" + }, + "temperature": { + "type": "number", + "description": "(Optional) Sampling temperature used for generation" + }, + "text": { + "$ref": 
"#/components/schemas/OpenAIResponseText", + "description": "Text formatting configuration for the response" + }, + "top_p": { + "type": "number", + "description": "(Optional) Nucleus sampling parameter used for generation" + }, + "truncation": { + "type": "string", + "description": "(Optional) Truncation strategy applied to the response" + } + }, + "additionalProperties": false, + "required": [ + "created_at", + "id", + "model", + "object", + "output", + "parallel_tool_calls", + "status", + "text" + ], + "title": "OpenAIResponseObject", + "description": "Complete OpenAI response object containing generation results and metadata." + }, + "OpenAIResponseContentPartOutputText": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "output_text", + "default": "output_text" + }, + "text": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "type", + "text" + ], + "title": "OpenAIResponseContentPartOutputText" + }, + "OpenAIResponseContentPartRefusal": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "refusal", + "default": "refusal" + }, + "refusal": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "type", + "refusal" + ], + "title": "OpenAIResponseContentPartRefusal" + }, + "OpenAIResponseObjectStream": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseCreated" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemAdded" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemDone" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDelta" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDone" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta" + }, + { + "$ref": 
"#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallInProgress" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallSearching" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallCompleted" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsInProgress" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsFailed" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsCompleted" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDone" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallInProgress" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallFailed" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallCompleted" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseContentPartAdded" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseContentPartDone" + }, + { + "$ref": "#/components/schemas/OpenAIResponseObjectStreamResponseCompleted" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "response.created": "#/components/schemas/OpenAIResponseObjectStreamResponseCreated", + "response.output_item.added": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemAdded", + "response.output_item.done": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemDone", + "response.output_text.delta": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDelta", + "response.output_text.done": "#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDone", + 
"response.function_call_arguments.delta": "#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta", + "response.function_call_arguments.done": "#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone", + "response.web_search_call.in_progress": "#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallInProgress", + "response.web_search_call.searching": "#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallSearching", + "response.web_search_call.completed": "#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallCompleted", + "response.mcp_list_tools.in_progress": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsInProgress", + "response.mcp_list_tools.failed": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsFailed", + "response.mcp_list_tools.completed": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsCompleted", + "response.mcp_call.arguments.delta": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta", + "response.mcp_call.arguments.done": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDone", + "response.mcp_call.in_progress": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallInProgress", + "response.mcp_call.failed": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallFailed", + "response.mcp_call.completed": "#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallCompleted", + "response.content_part.added": "#/components/schemas/OpenAIResponseObjectStreamResponseContentPartAdded", + "response.content_part.done": "#/components/schemas/OpenAIResponseObjectStreamResponseContentPartDone", + "response.completed": "#/components/schemas/OpenAIResponseObjectStreamResponseCompleted" + } + } + }, + "OpenAIResponseObjectStreamResponseCompleted": { + "type": "object", + "properties": { + "response": { + "$ref": 
"#/components/schemas/OpenAIResponseObject", + "description": "The completed response object" + }, + "type": { + "type": "string", + "const": "response.completed", + "default": "response.completed", + "description": "Event type identifier, always \"response.completed\"" + } + }, + "additionalProperties": false, + "required": [ + "response", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseCompleted", + "description": "Streaming event indicating a response has been completed." + }, + "OpenAIResponseObjectStreamResponseContentPartAdded": { + "type": "object", + "properties": { + "response_id": { + "type": "string", + "description": "Unique identifier of the response containing this content" + }, + "item_id": { + "type": "string", + "description": "Unique identifier of the output item containing this content part" + }, + "part": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseContentPartOutputText" + }, + { + "$ref": "#/components/schemas/OpenAIResponseContentPartRefusal" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "output_text": "#/components/schemas/OpenAIResponseContentPartOutputText", + "refusal": "#/components/schemas/OpenAIResponseContentPartRefusal" + } + }, + "description": "The content part that was added" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.content_part.added", + "default": "response.content_part.added", + "description": "Event type identifier, always \"response.content_part.added\"" + } + }, + "additionalProperties": false, + "required": [ + "response_id", + "item_id", + "part", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseContentPartAdded", + "description": "Streaming event for when a new content part is added to a response item." 
+ }, + "OpenAIResponseObjectStreamResponseContentPartDone": { + "type": "object", + "properties": { + "response_id": { + "type": "string", + "description": "Unique identifier of the response containing this content" + }, + "item_id": { + "type": "string", + "description": "Unique identifier of the output item containing this content part" + }, + "part": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseContentPartOutputText" + }, + { + "$ref": "#/components/schemas/OpenAIResponseContentPartRefusal" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "output_text": "#/components/schemas/OpenAIResponseContentPartOutputText", + "refusal": "#/components/schemas/OpenAIResponseContentPartRefusal" + } + }, + "description": "The completed content part" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.content_part.done", + "default": "response.content_part.done", + "description": "Event type identifier, always \"response.content_part.done\"" + } + }, + "additionalProperties": false, + "required": [ + "response_id", + "item_id", + "part", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseContentPartDone", + "description": "Streaming event for when a content part is completed." + }, + "OpenAIResponseObjectStreamResponseCreated": { + "type": "object", + "properties": { + "response": { + "$ref": "#/components/schemas/OpenAIResponseObject", + "description": "The newly created response object" + }, + "type": { + "type": "string", + "const": "response.created", + "default": "response.created", + "description": "Event type identifier, always \"response.created\"" + } + }, + "additionalProperties": false, + "required": [ + "response", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseCreated", + "description": "Streaming event indicating a new response has been created." 
+ }, + "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta": { + "type": "object", + "properties": { + "delta": { + "type": "string", + "description": "Incremental function call arguments being added" + }, + "item_id": { + "type": "string", + "description": "Unique identifier of the function call being updated" + }, + "output_index": { + "type": "integer", + "description": "Index position of the item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.function_call_arguments.delta", + "default": "response.function_call_arguments.delta", + "description": "Event type identifier, always \"response.function_call_arguments.delta\"" + } + }, + "additionalProperties": false, + "required": [ + "delta", + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta", + "description": "Streaming event for incremental function call argument updates." 
+ }, + "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone": { + "type": "object", + "properties": { + "arguments": { + "type": "string", + "description": "Final complete arguments JSON string for the function call" + }, + "item_id": { + "type": "string", + "description": "Unique identifier of the completed function call" + }, + "output_index": { + "type": "integer", + "description": "Index position of the item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.function_call_arguments.done", + "default": "response.function_call_arguments.done", + "description": "Event type identifier, always \"response.function_call_arguments.done\"" + } + }, + "additionalProperties": false, + "required": [ + "arguments", + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone", + "description": "Streaming event for when function call arguments are completed." 
+ }, + "OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta": { + "type": "object", + "properties": { + "delta": { + "type": "string" + }, + "item_id": { + "type": "string" + }, + "output_index": { + "type": "integer" + }, + "sequence_number": { + "type": "integer" + }, + "type": { + "type": "string", + "const": "response.mcp_call.arguments.delta", + "default": "response.mcp_call.arguments.delta" + } + }, + "additionalProperties": false, + "required": [ + "delta", + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta" + }, + "OpenAIResponseObjectStreamResponseMcpCallArgumentsDone": { + "type": "object", + "properties": { + "arguments": { + "type": "string" + }, + "item_id": { + "type": "string" + }, + "output_index": { + "type": "integer" + }, + "sequence_number": { + "type": "integer" + }, + "type": { + "type": "string", + "const": "response.mcp_call.arguments.done", + "default": "response.mcp_call.arguments.done" + } + }, + "additionalProperties": false, + "required": [ + "arguments", + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpCallArgumentsDone" + }, + "OpenAIResponseObjectStreamResponseMcpCallCompleted": { + "type": "object", + "properties": { + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.mcp_call.completed", + "default": "response.mcp_call.completed", + "description": "Event type identifier, always \"response.mcp_call.completed\"" + } + }, + "additionalProperties": false, + "required": [ + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpCallCompleted", + "description": "Streaming event for completed MCP calls." 
+ }, + "OpenAIResponseObjectStreamResponseMcpCallFailed": { + "type": "object", + "properties": { + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.mcp_call.failed", + "default": "response.mcp_call.failed", + "description": "Event type identifier, always \"response.mcp_call.failed\"" + } + }, + "additionalProperties": false, + "required": [ + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpCallFailed", + "description": "Streaming event for failed MCP calls." + }, + "OpenAIResponseObjectStreamResponseMcpCallInProgress": { + "type": "object", + "properties": { + "item_id": { + "type": "string", + "description": "Unique identifier of the MCP call" + }, + "output_index": { + "type": "integer", + "description": "Index position of the item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.mcp_call.in_progress", + "default": "response.mcp_call.in_progress", + "description": "Event type identifier, always \"response.mcp_call.in_progress\"" + } + }, + "additionalProperties": false, + "required": [ + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpCallInProgress", + "description": "Streaming event for MCP calls in progress." 
+ }, + "OpenAIResponseObjectStreamResponseMcpListToolsCompleted": { + "type": "object", + "properties": { + "sequence_number": { + "type": "integer" + }, + "type": { + "type": "string", + "const": "response.mcp_list_tools.completed", + "default": "response.mcp_list_tools.completed" + } + }, + "additionalProperties": false, + "required": [ + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpListToolsCompleted" + }, + "OpenAIResponseObjectStreamResponseMcpListToolsFailed": { + "type": "object", + "properties": { + "sequence_number": { + "type": "integer" + }, + "type": { + "type": "string", + "const": "response.mcp_list_tools.failed", + "default": "response.mcp_list_tools.failed" + } + }, + "additionalProperties": false, + "required": [ + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpListToolsFailed" + }, + "OpenAIResponseObjectStreamResponseMcpListToolsInProgress": { + "type": "object", + "properties": { + "sequence_number": { + "type": "integer" + }, + "type": { + "type": "string", + "const": "response.mcp_list_tools.in_progress", + "default": "response.mcp_list_tools.in_progress" + } + }, + "additionalProperties": false, + "required": [ + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseMcpListToolsInProgress" + }, + "OpenAIResponseObjectStreamResponseOutputItemAdded": { + "type": "object", + "properties": { + "response_id": { + "type": "string", + "description": "Unique identifier of the response containing this output" + }, + "item": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseMessage" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPCall" + }, + { + "$ref": 
"#/components/schemas/OpenAIResponseOutputMessageMCPListTools" + }, + { + "$ref": "#/components/schemas/OpenAIResponseMCPApprovalRequest" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "message": "#/components/schemas/OpenAIResponseMessage", + "web_search_call": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall", + "file_search_call": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall", + "function_call": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall", + "mcp_call": "#/components/schemas/OpenAIResponseOutputMessageMCPCall", + "mcp_list_tools": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools", + "mcp_approval_request": "#/components/schemas/OpenAIResponseMCPApprovalRequest" + } + }, + "description": "The output item that was added (message, tool call, etc.)" + }, + "output_index": { + "type": "integer", + "description": "Index position of this item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.output_item.added", + "default": "response.output_item.added", + "description": "Event type identifier, always \"response.output_item.added\"" + } + }, + "additionalProperties": false, + "required": [ + "response_id", + "item", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseOutputItemAdded", + "description": "Streaming event for when a new output item is added to the response." 
+ }, + "OpenAIResponseObjectStreamResponseOutputItemDone": { + "type": "object", + "properties": { + "response_id": { + "type": "string", + "description": "Unique identifier of the response containing this output" + }, + "item": { + "oneOf": [ + { + "$ref": "#/components/schemas/OpenAIResponseMessage" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPCall" + }, + { + "$ref": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools" + }, + { + "$ref": "#/components/schemas/OpenAIResponseMCPApprovalRequest" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "message": "#/components/schemas/OpenAIResponseMessage", + "web_search_call": "#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall", + "file_search_call": "#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall", + "function_call": "#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall", + "mcp_call": "#/components/schemas/OpenAIResponseOutputMessageMCPCall", + "mcp_list_tools": "#/components/schemas/OpenAIResponseOutputMessageMCPListTools", + "mcp_approval_request": "#/components/schemas/OpenAIResponseMCPApprovalRequest" + } + }, + "description": "The completed output item (message, tool call, etc.)" + }, + "output_index": { + "type": "integer", + "description": "Index position of this item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.output_item.done", + "default": "response.output_item.done", + "description": "Event type identifier, always \"response.output_item.done\"" + } + }, + "additionalProperties": false, + "required": 
[ + "response_id", + "item", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseOutputItemDone", + "description": "Streaming event for when an output item is completed." + }, + "OpenAIResponseObjectStreamResponseOutputTextDelta": { + "type": "object", + "properties": { + "content_index": { + "type": "integer", + "description": "Index position within the text content" + }, + "delta": { + "type": "string", + "description": "Incremental text content being added" + }, + "item_id": { + "type": "string", + "description": "Unique identifier of the output item being updated" + }, + "output_index": { + "type": "integer", + "description": "Index position of the item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.output_text.delta", + "default": "response.output_text.delta", + "description": "Event type identifier, always \"response.output_text.delta\"" + } + }, + "additionalProperties": false, + "required": [ + "content_index", + "delta", + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseOutputTextDelta", + "description": "Streaming event for incremental text content updates." 
+ }, + "OpenAIResponseObjectStreamResponseOutputTextDone": { + "type": "object", + "properties": { + "content_index": { + "type": "integer", + "description": "Index position within the text content" + }, + "text": { + "type": "string", + "description": "Final complete text content of the output item" + }, + "item_id": { + "type": "string", + "description": "Unique identifier of the completed output item" + }, + "output_index": { + "type": "integer", + "description": "Index position of the item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.output_text.done", + "default": "response.output_text.done", + "description": "Event type identifier, always \"response.output_text.done\"" + } + }, + "additionalProperties": false, + "required": [ + "content_index", + "text", + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseOutputTextDone", + "description": "Streaming event for when text output is completed." 
+ }, + "OpenAIResponseObjectStreamResponseWebSearchCallCompleted": { + "type": "object", + "properties": { + "item_id": { + "type": "string", + "description": "Unique identifier of the completed web search call" + }, + "output_index": { + "type": "integer", + "description": "Index position of the item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.web_search_call.completed", + "default": "response.web_search_call.completed", + "description": "Event type identifier, always \"response.web_search_call.completed\"" + } + }, + "additionalProperties": false, + "required": [ + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseWebSearchCallCompleted", + "description": "Streaming event for completed web search calls." + }, + "OpenAIResponseObjectStreamResponseWebSearchCallInProgress": { + "type": "object", + "properties": { + "item_id": { + "type": "string", + "description": "Unique identifier of the web search call" + }, + "output_index": { + "type": "integer", + "description": "Index position of the item in the output list" + }, + "sequence_number": { + "type": "integer", + "description": "Sequential number for ordering streaming events" + }, + "type": { + "type": "string", + "const": "response.web_search_call.in_progress", + "default": "response.web_search_call.in_progress", + "description": "Event type identifier, always \"response.web_search_call.in_progress\"" + } + }, + "additionalProperties": false, + "required": [ + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseWebSearchCallInProgress", + "description": "Streaming event for web search calls in progress." 
+ }, + "OpenAIResponseObjectStreamResponseWebSearchCallSearching": { + "type": "object", + "properties": { + "item_id": { + "type": "string" + }, + "output_index": { + "type": "integer" + }, + "sequence_number": { + "type": "integer" + }, + "type": { + "type": "string", + "const": "response.web_search_call.searching", + "default": "response.web_search_call.searching" + } + }, + "additionalProperties": false, + "required": [ + "item_id", + "output_index", + "sequence_number", + "type" + ], + "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching" + }, + "ListOpenaiResponsesRequest": { + "type": "object", + "properties": { + "after": { + "type": "string", + "description": "The ID of the last response to return." + }, + "limit": { + "type": "integer", + "description": "The number of responses to return." + }, + "model": { + "type": "string", + "description": "The model to filter responses by." + }, + "order": { + "type": "string", + "enum": [ + "asc", + "desc" + ], + "description": "The order to sort responses by when sorted by created_at ('asc' or 'desc')." + } + }, + "additionalProperties": false, + "title": "ListOpenaiResponsesRequest" + }, + "OpenAIDeleteResponseObject": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier of the deleted response" + }, + "object": { + "type": "string", + "const": "response", + "default": "response", + "description": "Object type identifier, always \"response\"" + }, + "deleted": { + "type": "boolean", + "default": true, + "description": "Deletion confirmation flag, always True" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "deleted" + ], + "title": "OpenAIDeleteResponseObject", + "description": "Response object confirming deletion of an OpenAI response." 
+ }, + "ListOpenAIResponseInputItem": { + "type": "object", + "properties": { + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/OpenAIResponseInput" + }, + "description": "List of input items" + }, + "object": { + "type": "string", + "const": "list", + "default": "list", + "description": "Object type identifier, always \"list\"" + } + }, + "additionalProperties": false, + "required": [ + "data", + "object" + ], + "title": "ListOpenAIResponseInputItem", + "description": "List container for OpenAI response input items." + }, + "VectorStoreFileCounts": { + "type": "object", + "properties": { + "completed": { + "type": "integer", + "description": "Number of files that have been successfully processed" + }, + "cancelled": { + "type": "integer", + "description": "Number of files that had their processing cancelled" + }, + "failed": { + "type": "integer", + "description": "Number of files that failed to process" + }, + "in_progress": { + "type": "integer", + "description": "Number of files currently being processed" + }, + "total": { + "type": "integer", + "description": "Total number of files in the vector store" + } + }, + "additionalProperties": false, + "required": [ + "completed", + "cancelled", + "failed", + "in_progress", + "total" + ], + "title": "VectorStoreFileCounts", + "description": "File processing status counts for a vector store." 
+ }, + "VectorStoreListResponse": { + "type": "object", + "properties": { + "object": { + "type": "string", + "default": "list", + "description": "Object type identifier, always \"list\"" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/VectorStoreObject" + }, + "description": "List of vector store objects" + }, + "first_id": { + "type": "string", + "description": "(Optional) ID of the first vector store in the list for pagination" + }, + "last_id": { + "type": "string", + "description": "(Optional) ID of the last vector store in the list for pagination" + }, + "has_more": { + "type": "boolean", + "default": false, + "description": "Whether there are more vector stores available beyond this page" + } + }, + "additionalProperties": false, + "required": [ + "object", + "data", + "has_more" + ], + "title": "VectorStoreListResponse", + "description": "Response from listing vector stores." + }, + "VectorStoreObject": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier for the vector store" + }, + "object": { + "type": "string", + "default": "vector_store", + "description": "Object type identifier, always \"vector_store\"" + }, + "created_at": { + "type": "integer", + "description": "Timestamp when the vector store was created" + }, + "name": { + "type": "string", + "description": "(Optional) Name of the vector store" + }, + "usage_bytes": { + "type": "integer", + "default": 0, + "description": "Storage space used by the vector store in bytes" + }, + "file_counts": { + "$ref": "#/components/schemas/VectorStoreFileCounts", + "description": "File processing status counts for the vector store" + }, + "status": { + "type": "string", + "default": "completed", + "description": "Current status of the vector store" + }, + "expires_after": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + 
"type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) Expiration policy for the vector store" + }, + "expires_at": { + "type": "integer", + "description": "(Optional) Timestamp when the vector store will expire" + }, + "last_active_at": { + "type": "integer", + "description": "(Optional) Timestamp of last activity on the vector store" + }, + "metadata": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "Set of key-value pairs that can be attached to the vector store" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "created_at", + "usage_bytes", + "file_counts", + "status", + "metadata" + ], + "title": "VectorStoreObject", + "description": "OpenAI Vector Store object." + }, + "OpenaiCreateVectorStoreRequest": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "A name for the vector store." + }, + "file_ids": { + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of File IDs that the vector store should use. Useful for tools like `file_search` that can access files." + }, + "expires_after": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "The expiration policy for a vector store." + }, + "chunking_strategy": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "The chunking strategy used to chunk the file(s). 
If not set, will use the `auto` strategy." + }, + "metadata": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "Set of 16 key-value pairs that can be attached to an object." + }, + "embedding_model": { + "type": "string", + "description": "The embedding model to use for this vector store." + }, + "embedding_dimension": { + "type": "integer", + "description": "The dimension of the embedding vectors (default: 384)." + }, + "provider_id": { + "type": "string", + "description": "The ID of the provider to use for this vector store." + } + }, + "additionalProperties": false, + "title": "OpenaiCreateVectorStoreRequest" + }, + "OpenaiUpdateVectorStoreRequest": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the vector store." + }, + "expires_after": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "The expiration policy for a vector store." + }, + "metadata": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "Set of 16 key-value pairs that can be attached to an object." 
+ } + }, + "additionalProperties": false, + "title": "OpenaiUpdateVectorStoreRequest" + }, + "VectorStoreDeleteResponse": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier of the deleted vector store" + }, + "object": { + "type": "string", + "default": "vector_store.deleted", + "description": "Object type identifier for the deletion response" + }, + "deleted": { + "type": "boolean", + "default": true, + "description": "Whether the deletion operation was successful" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "deleted" + ], + "title": "VectorStoreDeleteResponse", + "description": "Response from deleting a vector store." + }, + "VectorStoreChunkingStrategy": { + "oneOf": [ + { + "$ref": "#/components/schemas/VectorStoreChunkingStrategyAuto" + }, + { + "$ref": "#/components/schemas/VectorStoreChunkingStrategyStatic" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "auto": "#/components/schemas/VectorStoreChunkingStrategyAuto", + "static": "#/components/schemas/VectorStoreChunkingStrategyStatic" + } + } + }, + "VectorStoreChunkingStrategyAuto": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "auto", + "default": "auto", + "description": "Strategy type, always \"auto\" for automatic chunking" + } + }, + "additionalProperties": false, + "required": [ + "type" + ], + "title": "VectorStoreChunkingStrategyAuto", + "description": "Automatic chunking strategy for vector store files." 
+ }, + "VectorStoreChunkingStrategyStatic": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "static", + "default": "static", + "description": "Strategy type, always \"static\" for static chunking" + }, + "static": { + "$ref": "#/components/schemas/VectorStoreChunkingStrategyStaticConfig", + "description": "Configuration parameters for the static chunking strategy" + } + }, + "additionalProperties": false, + "required": [ + "type", + "static" + ], + "title": "VectorStoreChunkingStrategyStatic", + "description": "Static chunking strategy with configurable parameters." + }, + "VectorStoreChunkingStrategyStaticConfig": { + "type": "object", + "properties": { + "chunk_overlap_tokens": { + "type": "integer", + "default": 400, + "description": "Number of tokens to overlap between adjacent chunks" + }, + "max_chunk_size_tokens": { + "type": "integer", + "default": 800, + "description": "Maximum number of tokens per chunk, must be between 100 and 4096" + } + }, + "additionalProperties": false, + "required": [ + "chunk_overlap_tokens", + "max_chunk_size_tokens" + ], + "title": "VectorStoreChunkingStrategyStaticConfig", + "description": "Configuration for static chunking strategy." + }, + "OpenaiCreateVectorStoreFileBatchRequest": { + "type": "object", + "properties": { + "file_ids": { + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of File IDs that the vector store should use." + }, + "attributes": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) Key-value attributes to store with the files." + }, + "chunking_strategy": { + "$ref": "#/components/schemas/VectorStoreChunkingStrategy", + "description": "(Optional) The chunking strategy used to chunk the file(s). Defaults to auto." 
+ } + }, + "additionalProperties": false, + "required": [ + "file_ids" + ], + "title": "OpenaiCreateVectorStoreFileBatchRequest" + }, + "VectorStoreFileBatchObject": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier for the file batch" + }, + "object": { + "type": "string", + "default": "vector_store.file_batch", + "description": "Object type identifier, always \"vector_store.file_batch\"" + }, + "created_at": { + "type": "integer", + "description": "Timestamp when the file batch was created" + }, + "vector_store_id": { + "type": "string", + "description": "ID of the vector store containing the file batch" + }, + "status": { + "$ref": "#/components/schemas/VectorStoreFileStatus", + "description": "Current processing status of the file batch" + }, + "file_counts": { + "$ref": "#/components/schemas/VectorStoreFileCounts", + "description": "File processing status counts for the batch" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "created_at", + "vector_store_id", + "status", + "file_counts" + ], + "title": "VectorStoreFileBatchObject", + "description": "OpenAI Vector Store File Batch object." 
+ }, + "VectorStoreFileStatus": { + "oneOf": [ + { + "type": "string", + "const": "completed" + }, + { + "type": "string", + "const": "in_progress" + }, + { + "type": "string", + "const": "cancelled" + }, + { + "type": "string", + "const": "failed" + } + ] + }, + "VectorStoreFileLastError": { + "type": "object", + "properties": { + "code": { + "oneOf": [ + { + "type": "string", + "const": "server_error" + }, + { + "type": "string", + "const": "rate_limit_exceeded" + } + ], + "description": "Error code indicating the type of failure" + }, + "message": { + "type": "string", + "description": "Human-readable error message describing the failure" + } + }, + "additionalProperties": false, + "required": [ + "code", + "message" + ], + "title": "VectorStoreFileLastError", + "description": "Error information for failed vector store file processing." + }, + "VectorStoreFileObject": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier for the file" + }, + "object": { + "type": "string", + "default": "vector_store.file", + "description": "Object type identifier, always \"vector_store.file\"" + }, + "attributes": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "Key-value attributes associated with the file" + }, + "chunking_strategy": { + "oneOf": [ + { + "$ref": "#/components/schemas/VectorStoreChunkingStrategyAuto" + }, + { + "$ref": "#/components/schemas/VectorStoreChunkingStrategyStatic" + } + ], + "discriminator": { + "propertyName": "type", + "mapping": { + "auto": "#/components/schemas/VectorStoreChunkingStrategyAuto", + "static": "#/components/schemas/VectorStoreChunkingStrategyStatic" + } + }, + "description": "Strategy used for splitting the file into chunks" + }, + "created_at": { + "type": "integer", + 
"description": "Timestamp when the file was added to the vector store" + }, + "last_error": { + "$ref": "#/components/schemas/VectorStoreFileLastError", + "description": "(Optional) Error information if file processing failed" + }, + "status": { + "$ref": "#/components/schemas/VectorStoreFileStatus", + "description": "Current processing status of the file" + }, + "usage_bytes": { + "type": "integer", + "default": 0, + "description": "Storage space used by this file in bytes" + }, + "vector_store_id": { + "type": "string", + "description": "ID of the vector store containing this file" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "attributes", + "chunking_strategy", + "created_at", + "status", + "usage_bytes", + "vector_store_id" + ], + "title": "VectorStoreFileObject", + "description": "OpenAI Vector Store File object." + }, + "VectorStoreFilesListInBatchResponse": { + "type": "object", + "properties": { + "object": { + "type": "string", + "default": "list", + "description": "Object type identifier, always \"list\"" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/VectorStoreFileObject" + }, + "description": "List of vector store file objects in the batch" + }, + "first_id": { + "type": "string", + "description": "(Optional) ID of the first file in the list for pagination" + }, + "last_id": { + "type": "string", + "description": "(Optional) ID of the last file in the list for pagination" + }, + "has_more": { + "type": "boolean", + "default": false, + "description": "Whether there are more files available beyond this page" + } + }, + "additionalProperties": false, + "required": [ + "object", + "data", + "has_more" + ], + "title": "VectorStoreFilesListInBatchResponse", + "description": "Response from listing files in a vector store file batch." 
+ }, + "VectorStoreListFilesResponse": { + "type": "object", + "properties": { + "object": { + "type": "string", + "default": "list", + "description": "Object type identifier, always \"list\"" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/VectorStoreFileObject" + }, + "description": "List of vector store file objects" + }, + "first_id": { + "type": "string", + "description": "(Optional) ID of the first file in the list for pagination" + }, + "last_id": { + "type": "string", + "description": "(Optional) ID of the last file in the list for pagination" + }, + "has_more": { + "type": "boolean", + "default": false, + "description": "Whether there are more files available beyond this page" + } + }, + "additionalProperties": false, + "required": [ + "object", + "data", + "has_more" + ], + "title": "VectorStoreListFilesResponse", + "description": "Response from listing files in a vector store." + }, + "OpenaiAttachFileToVectorStoreRequest": { + "type": "object", + "properties": { + "file_id": { + "type": "string", + "description": "The ID of the file to attach to the vector store." + }, + "attributes": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "The key-value attributes stored with the file, which can be used for filtering." + }, + "chunking_strategy": { + "$ref": "#/components/schemas/VectorStoreChunkingStrategy", + "description": "The chunking strategy to use for the file." 
+ } + }, + "additionalProperties": false, + "required": [ + "file_id" + ], + "title": "OpenaiAttachFileToVectorStoreRequest" + }, + "OpenaiUpdateVectorStoreFileRequest": { + "type": "object", + "properties": { + "attributes": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "The updated key-value attributes to store with the file." + } + }, + "additionalProperties": false, + "required": [ + "attributes" + ], + "title": "OpenaiUpdateVectorStoreFileRequest" + }, + "VectorStoreFileDeleteResponse": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Unique identifier of the deleted file" + }, + "object": { + "type": "string", + "default": "vector_store.file.deleted", + "description": "Object type identifier for the deletion response" + }, + "deleted": { + "type": "boolean", + "default": true, + "description": "Whether the deletion operation was successful" + } + }, + "additionalProperties": false, + "required": [ + "id", + "object", + "deleted" + ], + "title": "VectorStoreFileDeleteResponse", + "description": "Response from deleting a vector store file." + }, + "VectorStoreContent": { + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "text", + "description": "Content type, currently only \"text\" is supported" + }, + "text": { + "type": "string", + "description": "The actual text content" + } + }, + "additionalProperties": false, + "required": [ + "type", + "text" + ], + "title": "VectorStoreContent", + "description": "Content item from a vector store file or search result." 
+ }, + "VectorStoreFileContentsResponse": { + "type": "object", + "properties": { + "file_id": { + "type": "string", + "description": "Unique identifier for the file" + }, + "filename": { + "type": "string", + "description": "Name of the file" + }, + "attributes": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "Key-value attributes associated with the file" + }, + "content": { + "type": "array", + "items": { + "$ref": "#/components/schemas/VectorStoreContent" + }, + "description": "List of content items from the file" + } + }, + "additionalProperties": false, + "required": [ + "file_id", + "filename", + "attributes", + "content" + ], + "title": "VectorStoreFileContentsResponse", + "description": "Response from retrieving the contents of a vector store file." + }, + "OpenaiSearchVectorStoreRequest": { + "type": "object", + "properties": { + "query": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ], + "description": "The query string or array for performing the search." + }, + "filters": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "Filters based on file attributes to narrow the search results." + }, + "max_num_results": { + "type": "integer", + "description": "Maximum number of results to return (1 to 50 inclusive, default 10)." 
+ }, + "ranking_options": { + "type": "object", + "properties": { + "ranker": { + "type": "string", + "description": "(Optional) Name of the ranking algorithm to use" + }, + "score_threshold": { + "type": "number", + "default": 0.0, + "description": "(Optional) Minimum relevance score threshold for results" + } + }, + "additionalProperties": false, + "description": "Ranking options for fine-tuning the search results." + }, + "rewrite_query": { + "type": "boolean", + "description": "Whether to rewrite the natural language query for vector search (default false)" + }, + "search_mode": { + "type": "string", + "description": "The search mode to use - \"keyword\", \"vector\", or \"hybrid\" (default \"vector\")" + } + }, + "additionalProperties": false, + "required": [ + "query" + ], + "title": "OpenaiSearchVectorStoreRequest" + }, + "VectorStoreSearchResponse": { + "type": "object", + "properties": { + "file_id": { + "type": "string", + "description": "Unique identifier of the file containing the result" + }, + "filename": { + "type": "string", + "description": "Name of the file containing the result" + }, + "score": { + "type": "number", + "description": "Relevance score for this search result" + }, + "attributes": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "boolean" + } + ] + }, + "description": "(Optional) Key-value attributes associated with the file" + }, + "content": { + "type": "array", + "items": { + "$ref": "#/components/schemas/VectorStoreContent" + }, + "description": "List of content items matching the search query" + } + }, + "additionalProperties": false, + "required": [ + "file_id", + "filename", + "score", + "content" + ], + "title": "VectorStoreSearchResponse", + "description": "Response from searching a vector store." 
+ }, + "VectorStoreSearchResponsePage": { + "type": "object", + "properties": { + "object": { + "type": "string", + "default": "vector_store.search_results.page", + "description": "Object type identifier for the search results page" + }, + "search_query": { + "type": "string", + "description": "The original search query that was executed" + }, + "data": { + "type": "array", + "items": { + "$ref": "#/components/schemas/VectorStoreSearchResponse" + }, + "description": "List of search result objects" + }, + "has_more": { + "type": "boolean", + "default": false, + "description": "Whether there are more results available beyond this page" + }, + "next_page": { + "type": "string", + "description": "(Optional) Token for retrieving the next page of results" + } + }, + "additionalProperties": false, + "required": [ + "object", + "search_query", + "data", + "has_more" + ], + "title": "VectorStoreSearchResponsePage", + "description": "Paginated response from searching a vector store." + }, "Checkpoint": { "type": "object", "properties": { @@ -6302,13 +13437,34 @@ "description": "", "x-displayName": "Llama Stack Evaluation API for running evaluations on model and agent candidates." }, + { + "name": "Files", + "description": "" + }, + { + "name": "Inference", + "description": "This API provides the raw interface to the underlying models. Two kinds of models are supported:\n- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions.\n- Embedding models: these models generate embeddings to be used for semantic search.", + "x-displayName": "Llama Stack Inference API for generating completions, chat completions, and embeddings." 
+ }, + { + "name": "Models", + "description": "" + }, { "name": "PostTraining (Coming Soon)", "description": "" }, + { + "name": "Safety", + "description": "" + }, { "name": "Telemetry", "description": "" + }, + { + "name": "VectorIO", + "description": "" } ], "x-tagGroups": [ @@ -6320,8 +13476,13 @@ "DatasetIO", "Datasets", "Eval", + "Files", + "Inference", + "Models", "PostTraining (Coming Soon)", - "Telemetry" + "Safety", + "Telemetry", + "VectorIO" ] } ] diff --git a/docs/static/deprecated-llama-stack-spec.yaml b/docs/static/deprecated-llama-stack-spec.yaml index ee8458c4e..d2e595b5d 100644 --- a/docs/static/deprecated-llama-stack-spec.yaml +++ b/docs/static/deprecated-llama-stack-spec.yaml @@ -1012,6 +1012,1387 @@ paths: schema: type: string deprecated: true + /v1/openai/v1/chat/completions: + get: + responses: + '200': + description: A ListOpenAIChatCompletionResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIChatCompletionResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: List all chat completions. + description: List all chat completions. + parameters: + - name: after + in: query + description: >- + The ID of the last chat completion to return. + required: false + schema: + type: string + - name: limit + in: query + description: >- + The maximum number of chat completions to return. + required: false + schema: + type: integer + - name: model + in: query + description: The model to filter by. + required: false + schema: + type: string + - name: order + in: query + description: >- + The order to sort the chat completions by: "asc" or "desc". Defaults to + "desc". 
+ required: false + schema: + $ref: '#/components/schemas/Order' + deprecated: true + post: + responses: + '200': + description: An OpenAIChatCompletion. + content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/OpenAIChatCompletion' + - $ref: '#/components/schemas/OpenAIChatCompletionChunk' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: >- + Generate an OpenAI-compatible chat completion for the given messages using + the specified model. + description: >- + Generate an OpenAI-compatible chat completion for the given messages using + the specified model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiChatCompletionRequest' + required: true + deprecated: true + /v1/openai/v1/chat/completions/{completion_id}: + get: + responses: + '200': + description: A OpenAICompletionWithInputMessages. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAICompletionWithInputMessages' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: Describe a chat completion by its ID. + description: Describe a chat completion by its ID. + parameters: + - name: completion_id + in: path + description: ID of the chat completion. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/completions: + post: + responses: + '200': + description: An OpenAICompletion. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/OpenAICompletion' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: >- + Generate an OpenAI-compatible completion for the given prompt using the specified + model. + description: >- + Generate an OpenAI-compatible completion for the given prompt using the specified + model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiCompletionRequest' + required: true + deprecated: true + /v1/openai/v1/embeddings: + post: + responses: + '200': + description: >- + An OpenAIEmbeddingsResponse containing the embeddings. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIEmbeddingsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: >- + Generate OpenAI-compatible embeddings for the given input using the specified + model. + description: >- + Generate OpenAI-compatible embeddings for the given input using the specified + model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiEmbeddingsRequest' + required: true + deprecated: true + /v1/openai/v1/files: + get: + responses: + '200': + description: >- + An ListOpenAIFileResponse containing the list of files. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIFileResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Returns a list of files that belong to the user's organization. + description: >- + Returns a list of files that belong to the user's organization. + parameters: + - name: after + in: query + description: >- + A cursor for use in pagination. `after` is an object ID that defines your + place in the list. For instance, if you make a list request and receive + 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo + in order to fetch the next page of the list. + required: false + schema: + type: string + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 10,000, and the default is 10,000. + required: false + schema: + type: integer + - name: order + in: query + description: >- + Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + required: false + schema: + $ref: '#/components/schemas/Order' + - name: purpose + in: query + description: >- + Only return files with the given purpose. + required: false + schema: + $ref: '#/components/schemas/OpenAIFilePurpose' + deprecated: true + post: + responses: + '200': + description: >- + An OpenAIFileObject representing the uploaded file. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Upload a file that can be used across various endpoints. + description: >- + Upload a file that can be used across various endpoints. + + The file upload should be a multipart form request with: + + - file: The File object (not file name) to be uploaded. + + - purpose: The intended purpose of the uploaded file. + + - expires_after: Optional form values describing expiration for the file. + parameters: [] + requestBody: + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + purpose: + $ref: '#/components/schemas/OpenAIFilePurpose' + expires_after: + $ref: '#/components/schemas/ExpiresAfter' + required: + - file + - purpose + required: true + deprecated: true + /v1/openai/v1/files/{file_id}: + get: + responses: + '200': + description: >- + An OpenAIFileObject containing file information. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Returns information about a specific file. + description: >- + Returns information about a specific file. + parameters: + - name: file_id + in: path + description: >- + The ID of the file to use for this request. + required: true + schema: + type: string + deprecated: true + delete: + responses: + '200': + description: >- + An OpenAIFileDeleteResponse indicating successful deletion. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIFileDeleteResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: Delete a file. + description: Delete a file. + parameters: + - name: file_id + in: path + description: >- + The ID of the file to use for this request. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/files/{file_id}/content: + get: + responses: + '200': + description: >- + The raw file content as a binary response. + content: + application/json: + schema: + $ref: '#/components/schemas/Response' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Returns the contents of the specified file. + description: >- + Returns the contents of the specified file. + parameters: + - name: file_id + in: path + description: >- + The ID of the file to use for this request. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/models: + get: + responses: + '200': + description: A OpenAIListModelsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIListModelsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Models + summary: List models using the OpenAI API. + description: List models using the OpenAI API. 
+ parameters: [] + deprecated: true + /v1/openai/v1/moderations: + post: + responses: + '200': + description: A moderation object. + content: + application/json: + schema: + $ref: '#/components/schemas/ModerationObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Safety + summary: >- + Classifies if text and/or image inputs are potentially harmful. + description: >- + Classifies if text and/or image inputs are potentially harmful. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RunModerationRequest' + required: true + deprecated: true + /v1/openai/v1/responses: + get: + responses: + '200': + description: A ListOpenAIResponseObject. + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: List all OpenAI responses. + description: List all OpenAI responses. + parameters: + - name: after + in: query + description: The ID of the last response to return. + required: false + schema: + type: string + - name: limit + in: query + description: The number of responses to return. + required: false + schema: + type: integer + - name: model + in: query + description: The model to filter responses by. + required: false + schema: + type: string + - name: order + in: query + description: >- + The order to sort responses by when sorted by created_at ('asc' or 'desc'). 
+ required: false + schema: + $ref: '#/components/schemas/Order' + deprecated: true + post: + responses: + '200': + description: A ListOpenAIResponseObject. + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: List all OpenAI responses. + description: List all OpenAI responses. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenaiResponsesRequest' + required: true + deprecated: true + /v1/openai/v1/responses/{response_id}: + get: + responses: + '200': + description: An OpenAIResponseObject. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Retrieve an OpenAI response by its ID. + description: Retrieve an OpenAI response by its ID. + parameters: + - name: response_id + in: path + description: >- + The ID of the OpenAI response to retrieve. + required: true + schema: + type: string + deprecated: true + delete: + responses: + '200': + description: An OpenAIDeleteResponseObject + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIDeleteResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Delete an OpenAI response by its ID. 
+ description: Delete an OpenAI response by its ID. + parameters: + - name: response_id + in: path + description: The ID of the OpenAI response to delete. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/responses/{response_id}/input_items: + get: + responses: + '200': + description: An ListOpenAIResponseInputItem. + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIResponseInputItem' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: >- + List input items for a given OpenAI response. + description: >- + List input items for a given OpenAI response. + parameters: + - name: response_id + in: path + description: >- + The ID of the response to retrieve input items for. + required: true + schema: + type: string + - name: after + in: query + description: >- + An item ID to list items after, used for pagination. + required: false + schema: + type: string + - name: before + in: query + description: >- + An item ID to list items before, used for pagination. + required: false + schema: + type: string + - name: include + in: query + description: >- + Additional fields to include in the response. + required: false + schema: + type: array + items: + type: string + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + The order to return the input items in. Default is desc. + required: false + schema: + $ref: '#/components/schemas/Order' + deprecated: true + /v1/openai/v1/vector_stores: + get: + responses: + '200': + description: >- + A VectorStoreListResponse containing the list of vector stores. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreListResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Returns a list of vector stores. + description: Returns a list of vector stores. + parameters: + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + required: false + schema: + type: string + - name: after + in: query + description: >- + A cursor for use in pagination. `after` is an object ID that defines your + place in the list. + required: false + schema: + type: string + - name: before + in: query + description: >- + A cursor for use in pagination. `before` is an object ID that defines + your place in the list. + required: false + schema: + type: string + deprecated: true + post: + responses: + '200': + description: >- + A VectorStoreObject representing the created vector store. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Creates a vector store. + description: Creates a vector store. 
+ parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiCreateVectorStoreRequest' + required: true + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}: + get: + responses: + '200': + description: >- + A VectorStoreObject representing the vector store. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Retrieves a vector store. + description: Retrieves a vector store. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to retrieve. + required: true + schema: + type: string + deprecated: true + post: + responses: + '200': + description: >- + A VectorStoreObject representing the updated vector store. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Updates a vector store. + description: Updates a vector store. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to update. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiUpdateVectorStoreRequest' + required: true + deprecated: true + delete: + responses: + '200': + description: >- + A VectorStoreDeleteResponse indicating the deletion status. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreDeleteResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Delete a vector store. + description: Delete a vector store. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to delete. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/file_batches: + post: + responses: + '200': + description: >- + A VectorStoreFileBatchObject representing the created file batch. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileBatchObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Create a vector store file batch. + description: Create a vector store file batch. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store to create the file batch for. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiCreateVectorStoreFileBatchRequest' + required: true + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}: + get: + responses: + '200': + description: >- + A VectorStoreFileBatchObject representing the file batch. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileBatchObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Retrieve a vector store file batch. + description: Retrieve a vector store file batch. + parameters: + - name: batch_id + in: path + description: The ID of the file batch to retrieve. + required: true + schema: + type: string + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file batch. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel: + post: + responses: + '200': + description: >- + A VectorStoreFileBatchObject representing the cancelled file batch. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileBatchObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Cancels a vector store file batch. + description: Cancels a vector store file batch. + parameters: + - name: batch_id + in: path + description: The ID of the file batch to cancel. + required: true + schema: + type: string + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file batch. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/files: + get: + responses: + '200': + description: >- + A VectorStoreFilesListInBatchResponse containing the list of files in + the batch. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFilesListInBatchResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: >- + Returns a list of vector store files in a batch. + description: >- + Returns a list of vector store files in a batch. + parameters: + - name: batch_id + in: path + description: >- + The ID of the file batch to list files from. + required: true + schema: + type: string + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file batch. + required: true + schema: + type: string + - name: after + in: query + description: >- + A cursor for use in pagination. `after` is an object ID that defines your + place in the list. + required: false + schema: + type: string + - name: before + in: query + description: >- + A cursor for use in pagination. `before` is an object ID that defines + your place in the list. + required: false + schema: + type: string + - name: filter + in: query + description: >- + Filter by file status. One of in_progress, completed, failed, cancelled. + required: false + schema: + type: string + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + required: false + schema: + type: string + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/files: + get: + responses: + '200': + description: >- + A VectorStoreListFilesResponse containing the list of files. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreListFilesResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: List files in a vector store. + description: List files in a vector store. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store to list files from. + required: true + schema: + type: string + - name: limit + in: query + description: >- + (Optional) A limit on the number of objects to be returned. Limit can + range between 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + (Optional) Sort order by the `created_at` timestamp of the objects. `asc` + for ascending order and `desc` for descending order. + required: false + schema: + type: string + - name: after + in: query + description: >- + (Optional) A cursor for use in pagination. `after` is an object ID that + defines your place in the list. + required: false + schema: + type: string + - name: before + in: query + description: >- + (Optional) A cursor for use in pagination. `before` is an object ID that + defines your place in the list. + required: false + schema: + type: string + - name: filter + in: query + description: >- + (Optional) Filter by file status to only return files with the specified + status. + required: false + schema: + $ref: '#/components/schemas/VectorStoreFileStatus' + deprecated: true + post: + responses: + '200': + description: >- + A VectorStoreFileObject representing the attached file. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Attach a file to a vector store. + description: Attach a file to a vector store. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store to attach the file to. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiAttachFileToVectorStoreRequest' + required: true + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}: + get: + responses: + '200': + description: >- + A VectorStoreFileObject representing the file. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Retrieves a vector store file. + description: Retrieves a vector store file. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file to retrieve. + required: true + schema: + type: string + - name: file_id + in: path + description: The ID of the file to retrieve. + required: true + schema: + type: string + deprecated: true + post: + responses: + '200': + description: >- + A VectorStoreFileObject representing the updated file. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Updates a vector store file. + description: Updates a vector store file. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file to update. + required: true + schema: + type: string + - name: file_id + in: path + description: The ID of the file to update. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiUpdateVectorStoreFileRequest' + required: true + deprecated: true + delete: + responses: + '200': + description: >- + A VectorStoreFileDeleteResponse indicating the deletion status. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileDeleteResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Delete a vector store file. + description: Delete a vector store file. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file to delete. + required: true + schema: + type: string + - name: file_id + in: path + description: The ID of the file to delete. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/files/{file_id}/content: + get: + responses: + '200': + description: >- + A list of InterleavedContent representing the file contents. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileContentsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: >- + Retrieves the contents of a vector store file. + description: >- + Retrieves the contents of a vector store file. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file to retrieve. + required: true + schema: + type: string + - name: file_id + in: path + description: The ID of the file to retrieve. + required: true + schema: + type: string + deprecated: true + /v1/openai/v1/vector_stores/{vector_store_id}/search: + post: + responses: + '200': + description: >- + A VectorStoreSearchResponse containing the search results. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreSearchResponsePage' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Search for chunks in a vector store. + description: >- + Search for chunks in a vector store. + + Searches a vector store for relevant chunks based on a query and optional + file attribute filters. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to search. 
+ required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiSearchVectorStoreRequest' + required: true + deprecated: true /v1/post-training/job/artifacts: get: responses: @@ -3608,6 +4989,4035 @@ components: title: Job description: >- A job execution instance with status tracking. + Order: + type: string + enum: + - asc + - desc + title: Order + description: Sort order for paginated responses. + ListOpenAIChatCompletionResponse: + type: object + properties: + data: + type: array + items: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChoice' + description: List of choices + object: + type: string + const: chat.completion + default: chat.completion + description: >- + The object type, which will be "chat.completion" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + input_messages: + type: array + items: + $ref: '#/components/schemas/OpenAIMessageParam' + additionalProperties: false + required: + - id + - choices + - object + - created + - model + - input_messages + title: OpenAICompletionWithInputMessages + description: >- + List of chat completion objects with their input messages + has_more: + type: boolean + description: >- + Whether there are more completions available beyond this list + first_id: + type: string + description: ID of the first completion in this list + last_id: + type: string + description: ID of the last completion in this list + object: + type: string + const: list + default: list + description: >- + Must be "list" to identify this as a list response + additionalProperties: false + required: + - data + - has_more + - first_id + - last_id + - object + title: ListOpenAIChatCompletionResponse + 
description: >- + Response from listing OpenAI-compatible chat completions. + OpenAIAssistantMessageParam: + type: object + properties: + role: + type: string + const: assistant + default: assistant + description: >- + Must be "assistant" to identify this as the model's response + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + description: The content of the model's response + name: + type: string + description: >- + (Optional) The name of the assistant message participant. + tool_calls: + type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionToolCall' + description: >- + List of tool calls. Each tool call is an OpenAIChatCompletionToolCall + object. + additionalProperties: false + required: + - role + title: OpenAIAssistantMessageParam + description: >- + A message containing the model's (assistant) response in an OpenAI-compatible + chat completion request. + "OpenAIChatCompletionContentPartImageParam": + type: object + properties: + type: + type: string + const: image_url + default: image_url + description: >- + Must be "image_url" to identify this as image content + image_url: + $ref: '#/components/schemas/OpenAIImageURL' + description: >- + Image URL specification and processing details + additionalProperties: false + required: + - type + - image_url + title: >- + OpenAIChatCompletionContentPartImageParam + description: >- + Image content part for OpenAI-compatible chat completion messages. 
+ OpenAIChatCompletionContentPartParam: + oneOf: + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartImageParam' + - $ref: '#/components/schemas/OpenAIFile' + discriminator: + propertyName: type + mapping: + text: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + image_url: '#/components/schemas/OpenAIChatCompletionContentPartImageParam' + file: '#/components/schemas/OpenAIFile' + OpenAIChatCompletionContentPartTextParam: + type: object + properties: + type: + type: string + const: text + default: text + description: >- + Must be "text" to identify this as text content + text: + type: string + description: The text content of the message + additionalProperties: false + required: + - type + - text + title: OpenAIChatCompletionContentPartTextParam + description: >- + Text content part for OpenAI-compatible chat completion messages. + OpenAIChatCompletionToolCall: + type: object + properties: + index: + type: integer + description: >- + (Optional) Index of the tool call in the list + id: + type: string + description: >- + (Optional) Unique identifier for the tool call + type: + type: string + const: function + default: function + description: >- + Must be "function" to identify this as a function call + function: + $ref: '#/components/schemas/OpenAIChatCompletionToolCallFunction' + description: (Optional) Function call details + additionalProperties: false + required: + - type + title: OpenAIChatCompletionToolCall + description: >- + Tool call specification for OpenAI-compatible chat completion responses. 
+ OpenAIChatCompletionToolCallFunction: + type: object + properties: + name: + type: string + description: (Optional) Name of the function to call + arguments: + type: string + description: >- + (Optional) Arguments to pass to the function as a JSON string + additionalProperties: false + title: OpenAIChatCompletionToolCallFunction + description: >- + Function call details for OpenAI-compatible tool calls. + OpenAIChoice: + type: object + properties: + message: + oneOf: + - $ref: '#/components/schemas/OpenAIUserMessageParam' + - $ref: '#/components/schemas/OpenAISystemMessageParam' + - $ref: '#/components/schemas/OpenAIAssistantMessageParam' + - $ref: '#/components/schemas/OpenAIToolMessageParam' + - $ref: '#/components/schemas/OpenAIDeveloperMessageParam' + discriminator: + propertyName: role + mapping: + user: '#/components/schemas/OpenAIUserMessageParam' + system: '#/components/schemas/OpenAISystemMessageParam' + assistant: '#/components/schemas/OpenAIAssistantMessageParam' + tool: '#/components/schemas/OpenAIToolMessageParam' + developer: '#/components/schemas/OpenAIDeveloperMessageParam' + description: The message from the model + finish_reason: + type: string + description: The reason the model stopped generating + index: + type: integer + description: The index of the choice + logprobs: + $ref: '#/components/schemas/OpenAIChoiceLogprobs' + description: >- + (Optional) The log probabilities for the tokens in the message + additionalProperties: false + required: + - message + - finish_reason + - index + title: OpenAIChoice + description: >- + A choice from an OpenAI-compatible chat completion response. 
+ OpenAIChoiceLogprobs: + type: object + properties: + content: + type: array + items: + $ref: '#/components/schemas/OpenAITokenLogProb' + description: >- + (Optional) The log probabilities for the tokens in the message + refusal: + type: array + items: + $ref: '#/components/schemas/OpenAITokenLogProb' + description: >- + (Optional) The log probabilities for the tokens in the message + additionalProperties: false + title: OpenAIChoiceLogprobs + description: >- + The log probabilities for the tokens in the message from an OpenAI-compatible + chat completion response. + OpenAIDeveloperMessageParam: + type: object + properties: + role: + type: string + const: developer + default: developer + description: >- + Must be "developer" to identify this as a developer message + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + description: The content of the developer message + name: + type: string + description: >- + (Optional) The name of the developer message participant. + additionalProperties: false + required: + - role + - content + title: OpenAIDeveloperMessageParam + description: >- + A message from the developer in an OpenAI-compatible chat completion request. + OpenAIFile: + type: object + properties: + type: + type: string + const: file + default: file + file: + $ref: '#/components/schemas/OpenAIFileFile' + additionalProperties: false + required: + - type + - file + title: OpenAIFile + OpenAIFileFile: + type: object + properties: + file_data: + type: string + file_id: + type: string + filename: + type: string + additionalProperties: false + title: OpenAIFileFile + OpenAIImageURL: + type: object + properties: + url: + type: string + description: >- + URL of the image to include in the message + detail: + type: string + description: >- + (Optional) Level of detail for image processing. 
Can be "low", "high", + or "auto" + additionalProperties: false + required: + - url + title: OpenAIImageURL + description: >- + Image URL specification for OpenAI-compatible chat completion messages. + OpenAIMessageParam: + oneOf: + - $ref: '#/components/schemas/OpenAIUserMessageParam' + - $ref: '#/components/schemas/OpenAISystemMessageParam' + - $ref: '#/components/schemas/OpenAIAssistantMessageParam' + - $ref: '#/components/schemas/OpenAIToolMessageParam' + - $ref: '#/components/schemas/OpenAIDeveloperMessageParam' + discriminator: + propertyName: role + mapping: + user: '#/components/schemas/OpenAIUserMessageParam' + system: '#/components/schemas/OpenAISystemMessageParam' + assistant: '#/components/schemas/OpenAIAssistantMessageParam' + tool: '#/components/schemas/OpenAIToolMessageParam' + developer: '#/components/schemas/OpenAIDeveloperMessageParam' + OpenAISystemMessageParam: + type: object + properties: + role: + type: string + const: system + default: system + description: >- + Must be "system" to identify this as a system message + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + description: >- + The content of the "system prompt". If multiple system messages are provided, + they are concatenated. The underlying Llama Stack code may also add other + system messages (for example, for formatting tool definitions). + name: + type: string + description: >- + (Optional) The name of the system message participant. + additionalProperties: false + required: + - role + - content + title: OpenAISystemMessageParam + description: >- + A system message providing instructions or context to the model. 
+ OpenAITokenLogProb: + type: object + properties: + token: + type: string + bytes: + type: array + items: + type: integer + logprob: + type: number + top_logprobs: + type: array + items: + $ref: '#/components/schemas/OpenAITopLogProb' + additionalProperties: false + required: + - token + - logprob + - top_logprobs + title: OpenAITokenLogProb + description: >- + The log probability for a token from an OpenAI-compatible chat completion + response. + OpenAIToolMessageParam: + type: object + properties: + role: + type: string + const: tool + default: tool + description: >- + Must be "tool" to identify this as a tool response + tool_call_id: + type: string + description: >- + Unique identifier for the tool call this response is for + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + description: The response content from the tool + additionalProperties: false + required: + - role + - tool_call_id + - content + title: OpenAIToolMessageParam + description: >- + A message representing the result of a tool invocation in an OpenAI-compatible + chat completion request. + OpenAITopLogProb: + type: object + properties: + token: + type: string + bytes: + type: array + items: + type: integer + logprob: + type: number + additionalProperties: false + required: + - token + - logprob + title: OpenAITopLogProb + description: >- + The top log probability for a token from an OpenAI-compatible chat completion + response. + OpenAIUserMessageParam: + type: object + properties: + role: + type: string + const: user + default: user + description: >- + Must be "user" to identify this as a user message + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartParam' + description: >- + The content of the message, which can include text and other media + name: + type: string + description: >- + (Optional) The name of the user message participant. 
+ additionalProperties: false + required: + - role + - content + title: OpenAIUserMessageParam + description: >- + A message from the user in an OpenAI-compatible chat completion request. + OpenAIJSONSchema: + type: object + properties: + name: + type: string + description: Name of the schema + description: + type: string + description: (Optional) Description of the schema + strict: + type: boolean + description: >- + (Optional) Whether to enforce strict adherence to the schema + schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The JSON schema definition + additionalProperties: false + required: + - name + title: OpenAIJSONSchema + description: >- + JSON schema specification for OpenAI-compatible structured response format. + OpenAIResponseFormatJSONObject: + type: object + properties: + type: + type: string + const: json_object + default: json_object + description: >- + Must be "json_object" to indicate generic JSON object response format + additionalProperties: false + required: + - type + title: OpenAIResponseFormatJSONObject + description: >- + JSON object response format for OpenAI-compatible chat completion requests. + OpenAIResponseFormatJSONSchema: + type: object + properties: + type: + type: string + const: json_schema + default: json_schema + description: >- + Must be "json_schema" to indicate structured JSON response format + json_schema: + $ref: '#/components/schemas/OpenAIJSONSchema' + description: >- + The JSON schema specification for the response + additionalProperties: false + required: + - type + - json_schema + title: OpenAIResponseFormatJSONSchema + description: >- + JSON schema response format for OpenAI-compatible chat completion requests. 
+ OpenAIResponseFormatParam: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseFormatText' + - $ref: '#/components/schemas/OpenAIResponseFormatJSONSchema' + - $ref: '#/components/schemas/OpenAIResponseFormatJSONObject' + discriminator: + propertyName: type + mapping: + text: '#/components/schemas/OpenAIResponseFormatText' + json_schema: '#/components/schemas/OpenAIResponseFormatJSONSchema' + json_object: '#/components/schemas/OpenAIResponseFormatJSONObject' + OpenAIResponseFormatText: + type: object + properties: + type: + type: string + const: text + default: text + description: >- + Must be "text" to indicate plain text response format + additionalProperties: false + required: + - type + title: OpenAIResponseFormatText + description: >- + Text response format for OpenAI-compatible chat completion requests. + OpenaiChatCompletionRequest: + type: object + properties: + model: + type: string + description: >- + The identifier of the model to use. The model must be registered with + Llama Stack and available via the /models endpoint. + messages: + type: array + items: + $ref: '#/components/schemas/OpenAIMessageParam' + description: List of messages in the conversation. + frequency_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + function_call: + oneOf: + - type: string + - type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The function call to use. + functions: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) List of functions to use. + logit_bias: + type: object + additionalProperties: + type: number + description: (Optional) The logit bias to use. + logprobs: + type: boolean + description: (Optional) The log probabilities to use. 
+ max_completion_tokens: + type: integer + description: >- + (Optional) The maximum number of tokens to generate. + max_tokens: + type: integer + description: >- + (Optional) The maximum number of tokens to generate. + n: + type: integer + description: >- + (Optional) The number of completions to generate. + parallel_tool_calls: + type: boolean + description: >- + (Optional) Whether to parallelize tool calls. + presence_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + response_format: + $ref: '#/components/schemas/OpenAIResponseFormatParam' + description: (Optional) The response format to use. + seed: + type: integer + description: (Optional) The seed to use. + stop: + oneOf: + - type: string + - type: array + items: + type: string + description: (Optional) The stop tokens to use. + stream: + type: boolean + description: >- + (Optional) Whether to stream the response. + stream_options: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The stream options to use. + temperature: + type: number + description: (Optional) The temperature to use. + tool_choice: + oneOf: + - type: string + - type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The tool choice to use. + tools: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The tools to use. + top_logprobs: + type: integer + description: >- + (Optional) The top log probabilities to use. + top_p: + type: number + description: (Optional) The top p to use. + user: + type: string + description: (Optional) The user to use. 
+ additionalProperties: false + required: + - model + - messages + title: OpenaiChatCompletionRequest + OpenAIChatCompletion: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChoice' + description: List of choices + object: + type: string + const: chat.completion + default: chat.completion + description: >- + The object type, which will be "chat.completion" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + additionalProperties: false + required: + - id + - choices + - object + - created + - model + title: OpenAIChatCompletion + description: >- + Response from an OpenAI-compatible chat completion request. + OpenAIChatCompletionChunk: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChunkChoice' + description: List of choices + object: + type: string + const: chat.completion.chunk + default: chat.completion.chunk + description: >- + The object type, which will be "chat.completion.chunk" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + additionalProperties: false + required: + - id + - choices + - object + - created + - model + title: OpenAIChatCompletionChunk + description: >- + Chunk from a streaming response to an OpenAI-compatible chat completion request. 
+ OpenAIChoiceDelta: + type: object + properties: + content: + type: string + description: (Optional) The content of the delta + refusal: + type: string + description: (Optional) The refusal of the delta + role: + type: string + description: (Optional) The role of the delta + tool_calls: + type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionToolCall' + description: (Optional) The tool calls of the delta + additionalProperties: false + title: OpenAIChoiceDelta + description: >- + A delta from an OpenAI-compatible chat completion streaming response. + OpenAIChunkChoice: + type: object + properties: + delta: + $ref: '#/components/schemas/OpenAIChoiceDelta' + description: The delta from the chunk + finish_reason: + type: string + description: The reason the model stopped generating + index: + type: integer + description: The index of the choice + logprobs: + $ref: '#/components/schemas/OpenAIChoiceLogprobs' + description: >- + (Optional) The log probabilities for the tokens in the message + additionalProperties: false + required: + - delta + - finish_reason + - index + title: OpenAIChunkChoice + description: >- + A chunk choice from an OpenAI-compatible chat completion streaming response. 
+ OpenAICompletionWithInputMessages: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChoice' + description: List of choices + object: + type: string + const: chat.completion + default: chat.completion + description: >- + The object type, which will be "chat.completion" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + input_messages: + type: array + items: + $ref: '#/components/schemas/OpenAIMessageParam' + additionalProperties: false + required: + - id + - choices + - object + - created + - model + - input_messages + title: OpenAICompletionWithInputMessages + OpenaiCompletionRequest: + type: object + properties: + model: + type: string + description: >- + The identifier of the model to use. The model must be registered with + Llama Stack and available via the /models endpoint. + prompt: + oneOf: + - type: string + - type: array + items: + type: string + - type: array + items: + type: integer + - type: array + items: + type: array + items: + type: integer + description: The prompt to generate a completion for. + best_of: + type: integer + description: >- + (Optional) The number of completions to generate. + echo: + type: boolean + description: (Optional) Whether to echo the prompt. + frequency_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + logit_bias: + type: object + additionalProperties: + type: number + description: (Optional) The logit bias to use. + logprobs: + type: boolean + description: (Optional) The log probabilities to use. + max_tokens: + type: integer + description: >- + (Optional) The maximum number of tokens to generate. + n: + type: integer + description: >- + (Optional) The number of completions to generate. 
+ presence_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + seed: + type: integer + description: (Optional) The seed to use. + stop: + oneOf: + - type: string + - type: array + items: + type: string + description: (Optional) The stop tokens to use. + stream: + type: boolean + description: >- + (Optional) Whether to stream the response. + stream_options: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The stream options to use. + temperature: + type: number + description: (Optional) The temperature to use. + top_p: + type: number + description: (Optional) The top p to use. + user: + type: string + description: (Optional) The user to use. + guided_choice: + type: array + items: + type: string + prompt_logprobs: + type: integer + suffix: + type: string + description: >- + (Optional) The suffix that should be appended to the completion. + additionalProperties: false + required: + - model + - prompt + title: OpenaiCompletionRequest + OpenAICompletion: + type: object + properties: + id: + type: string + choices: + type: array + items: + $ref: '#/components/schemas/OpenAICompletionChoice' + created: + type: integer + model: + type: string + object: + type: string + const: text_completion + default: text_completion + additionalProperties: false + required: + - id + - choices + - created + - model + - object + title: OpenAICompletion + description: >- + Response from an OpenAI-compatible completion request. + OpenAICompletionChoice: + type: object + properties: + finish_reason: + type: string + text: + type: string + index: + type: integer + logprobs: + $ref: '#/components/schemas/OpenAIChoiceLogprobs' + additionalProperties: false + required: + - finish_reason + - text + - index + title: OpenAICompletionChoice + description: >- + A choice from an OpenAI-compatible completion response. 
+ OpenaiEmbeddingsRequest: + type: object + properties: + model: + type: string + description: >- + The identifier of the model to use. The model must be an embedding model + registered with Llama Stack and available via the /models endpoint. + input: + oneOf: + - type: string + - type: array + items: + type: string + description: >- + Input text to embed, encoded as a string or array of strings. To embed + multiple inputs in a single request, pass an array of strings. + encoding_format: + type: string + description: >- + (Optional) The format to return the embeddings in. Can be either "float" + or "base64". Defaults to "float". + dimensions: + type: integer + description: >- + (Optional) The number of dimensions the resulting output embeddings should + have. Only supported in text-embedding-3 and later models. + user: + type: string + description: >- + (Optional) A unique identifier representing your end-user, which can help + OpenAI to monitor and detect abuse. + additionalProperties: false + required: + - model + - input + title: OpenaiEmbeddingsRequest + OpenAIEmbeddingData: + type: object + properties: + object: + type: string + const: embedding + default: embedding + description: >- + The object type, which will be "embedding" + embedding: + oneOf: + - type: array + items: + type: number + - type: string + description: >- + The embedding vector as a list of floats (when encoding_format="float") + or as a base64-encoded string (when encoding_format="base64") + index: + type: integer + description: >- + The index of the embedding in the input list + additionalProperties: false + required: + - object + - embedding + - index + title: OpenAIEmbeddingData + description: >- + A single embedding data object from an OpenAI-compatible embeddings response. 
+ OpenAIEmbeddingUsage: + type: object + properties: + prompt_tokens: + type: integer + description: The number of tokens in the input + total_tokens: + type: integer + description: The total number of tokens used + additionalProperties: false + required: + - prompt_tokens + - total_tokens + title: OpenAIEmbeddingUsage + description: >- + Usage information for an OpenAI-compatible embeddings response. + OpenAIEmbeddingsResponse: + type: object + properties: + object: + type: string + const: list + default: list + description: The object type, which will be "list" + data: + type: array + items: + $ref: '#/components/schemas/OpenAIEmbeddingData' + description: List of embedding data objects + model: + type: string + description: >- + The model that was used to generate the embeddings + usage: + $ref: '#/components/schemas/OpenAIEmbeddingUsage' + description: Usage information + additionalProperties: false + required: + - object + - data + - model + - usage + title: OpenAIEmbeddingsResponse + description: >- + Response from an OpenAI-compatible embeddings request. + OpenAIFilePurpose: + type: string + enum: + - assistants + - batch + title: OpenAIFilePurpose + description: >- + Valid purpose values for OpenAI Files API. 
+ ListOpenAIFileResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/OpenAIFileObject' + description: List of file objects + has_more: + type: boolean + description: >- + Whether there are more files available beyond this page + first_id: + type: string + description: >- + ID of the first file in the list for pagination + last_id: + type: string + description: >- + ID of the last file in the list for pagination + object: + type: string + const: list + default: list + description: The object type, which is always "list" + additionalProperties: false + required: + - data + - has_more + - first_id + - last_id + - object + title: ListOpenAIFileResponse + description: >- + Response for listing files in OpenAI Files API. + OpenAIFileObject: + type: object + properties: + object: + type: string + const: file + default: file + description: The object type, which is always "file" + id: + type: string + description: >- + The file identifier, which can be referenced in the API endpoints + bytes: + type: integer + description: The size of the file, in bytes + created_at: + type: integer + description: >- + The Unix timestamp (in seconds) for when the file was created + expires_at: + type: integer + description: >- + The Unix timestamp (in seconds) for when the file expires + filename: + type: string + description: The name of the file + purpose: + type: string + enum: + - assistants + - batch + description: The intended purpose of the file + additionalProperties: false + required: + - object + - id + - bytes + - created_at + - expires_at + - filename + - purpose + title: OpenAIFileObject + description: >- + OpenAI File object as defined in the OpenAI Files API. + ExpiresAfter: + type: object + properties: + anchor: + type: string + const: created_at + seconds: + type: integer + additionalProperties: false + required: + - anchor + - seconds + title: ExpiresAfter + description: >- + Control expiration of uploaded files. 
+ + Params: + - anchor, must be "created_at" + - seconds, must be int between 3600 and 2592000 (1 hour to 30 days) + OpenAIFileDeleteResponse: + type: object + properties: + id: + type: string + description: The file identifier that was deleted + object: + type: string + const: file + default: file + description: The object type, which is always "file" + deleted: + type: boolean + description: >- + Whether the file was successfully deleted + additionalProperties: false + required: + - id + - object + - deleted + title: OpenAIFileDeleteResponse + description: >- + Response for deleting a file in OpenAI Files API. + Response: + type: object + title: Response + OpenAIModel: + type: object + properties: + id: + type: string + object: + type: string + const: model + default: model + created: + type: integer + owned_by: + type: string + additionalProperties: false + required: + - id + - object + - created + - owned_by + title: OpenAIModel + description: A model from OpenAI. + OpenAIListModelsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/OpenAIModel' + additionalProperties: false + required: + - data + title: OpenAIListModelsResponse + RunModerationRequest: + type: object + properties: + input: + oneOf: + - type: string + - type: array + items: + type: string + description: >- + Input (or inputs) to classify. Can be a single string, an array of strings, + or an array of multi-modal input objects similar to other models. + model: + type: string + description: >- + The content moderation model you would like to use. + additionalProperties: false + required: + - input + - model + title: RunModerationRequest + ModerationObject: + type: object + properties: + id: + type: string + description: >- + The unique identifier for the moderation request. + model: + type: string + description: >- + The model used to generate the moderation results. 
+      results:
+        type: array
+        items:
+          $ref: '#/components/schemas/ModerationObjectResults'
+        description: A list of moderation results
+      additionalProperties: false
+      required:
+        - id
+        - model
+        - results
+      title: ModerationObject
+      description: A moderation object.
+    ModerationObjectResults:
+      type: object
+      properties:
+        flagged:
+          type: boolean
+          description: >-
+            Whether any of the below categories are flagged.
+        categories:
+          type: object
+          additionalProperties:
+            type: boolean
+          description: >-
+            A list of the categories, and whether they are flagged or not.
+        category_applied_input_types:
+          type: object
+          additionalProperties:
+            type: array
+            items:
+              type: string
+          description: >-
+            A list of the categories along with the input type(s) that the score applies
+            to.
+        category_scores:
+          type: object
+          additionalProperties:
+            type: number
+          description: >-
+            A list of the categories along with their scores as predicted by the model.
+        user_message:
+          type: string
+        metadata:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+      additionalProperties: false
+      required:
+        - flagged
+        - metadata
+      title: ModerationObjectResults
+      description: A single moderation result for one input.
+ ListOpenAIResponseObject: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseObjectWithInput' + description: >- + List of response objects with their input context + has_more: + type: boolean + description: >- + Whether there are more results available beyond this page + first_id: + type: string + description: >- + Identifier of the first item in this page + last_id: + type: string + description: Identifier of the last item in this page + object: + type: string + const: list + default: list + description: Object type identifier, always "list" + additionalProperties: false + required: + - data + - has_more + - first_id + - last_id + - object + title: ListOpenAIResponseObject + description: >- + Paginated list of OpenAI response objects with navigation metadata. + OpenAIResponseAnnotationCitation: + type: object + properties: + type: + type: string + const: url_citation + default: url_citation + description: >- + Annotation type identifier, always "url_citation" + end_index: + type: integer + description: >- + End position of the citation span in the content + start_index: + type: integer + description: >- + Start position of the citation span in the content + title: + type: string + description: Title of the referenced web resource + url: + type: string + description: URL of the referenced web resource + additionalProperties: false + required: + - type + - end_index + - start_index + - title + - url + title: OpenAIResponseAnnotationCitation + description: >- + URL citation annotation for referencing external web resources. 
+ "OpenAIResponseAnnotationContainerFileCitation": + type: object + properties: + type: + type: string + const: container_file_citation + default: container_file_citation + container_id: + type: string + end_index: + type: integer + file_id: + type: string + filename: + type: string + start_index: + type: integer + additionalProperties: false + required: + - type + - container_id + - end_index + - file_id + - filename + - start_index + title: >- + OpenAIResponseAnnotationContainerFileCitation + OpenAIResponseAnnotationFileCitation: + type: object + properties: + type: + type: string + const: file_citation + default: file_citation + description: >- + Annotation type identifier, always "file_citation" + file_id: + type: string + description: Unique identifier of the referenced file + filename: + type: string + description: Name of the referenced file + index: + type: integer + description: >- + Position index of the citation within the content + additionalProperties: false + required: + - type + - file_id + - filename + - index + title: OpenAIResponseAnnotationFileCitation + description: >- + File citation annotation for referencing specific files in response content. 
+ OpenAIResponseAnnotationFilePath: + type: object + properties: + type: + type: string + const: file_path + default: file_path + file_id: + type: string + index: + type: integer + additionalProperties: false + required: + - type + - file_id + - index + title: OpenAIResponseAnnotationFilePath + OpenAIResponseAnnotations: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseAnnotationFileCitation' + - $ref: '#/components/schemas/OpenAIResponseAnnotationCitation' + - $ref: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation' + - $ref: '#/components/schemas/OpenAIResponseAnnotationFilePath' + discriminator: + propertyName: type + mapping: + file_citation: '#/components/schemas/OpenAIResponseAnnotationFileCitation' + url_citation: '#/components/schemas/OpenAIResponseAnnotationCitation' + container_file_citation: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation' + file_path: '#/components/schemas/OpenAIResponseAnnotationFilePath' + OpenAIResponseError: + type: object + properties: + code: + type: string + description: >- + Error code identifying the type of failure + message: + type: string + description: >- + Human-readable error message describing the failure + additionalProperties: false + required: + - code + - message + title: OpenAIResponseError + description: >- + Error details for failed OpenAI response requests. 
+ OpenAIResponseInput: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseInputFunctionToolCallOutput' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalResponse' + - $ref: '#/components/schemas/OpenAIResponseMessage' + "OpenAIResponseInputFunctionToolCallOutput": + type: object + properties: + call_id: + type: string + output: + type: string + type: + type: string + const: function_call_output + default: function_call_output + id: + type: string + status: + type: string + additionalProperties: false + required: + - call_id + - output + - type + title: >- + OpenAIResponseInputFunctionToolCallOutput + description: >- + This represents the output of a function call that gets passed back to the + model. 
+ OpenAIResponseInputMessageContent: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseInputMessageContentText' + - $ref: '#/components/schemas/OpenAIResponseInputMessageContentImage' + discriminator: + propertyName: type + mapping: + input_text: '#/components/schemas/OpenAIResponseInputMessageContentText' + input_image: '#/components/schemas/OpenAIResponseInputMessageContentImage' + OpenAIResponseInputMessageContentImage: + type: object + properties: + detail: + oneOf: + - type: string + const: low + - type: string + const: high + - type: string + const: auto + default: auto + description: >- + Level of detail for image processing, can be "low", "high", or "auto" + type: + type: string + const: input_image + default: input_image + description: >- + Content type identifier, always "input_image" + image_url: + type: string + description: (Optional) URL of the image content + additionalProperties: false + required: + - detail + - type + title: OpenAIResponseInputMessageContentImage + description: >- + Image content for input messages in OpenAI response format. + OpenAIResponseInputMessageContentText: + type: object + properties: + text: + type: string + description: The text content of the input message + type: + type: string + const: input_text + default: input_text + description: >- + Content type identifier, always "input_text" + additionalProperties: false + required: + - text + - type + title: OpenAIResponseInputMessageContentText + description: >- + Text content for input messages in OpenAI response format. 
+ OpenAIResponseMCPApprovalRequest: + type: object + properties: + arguments: + type: string + id: + type: string + name: + type: string + server_label: + type: string + type: + type: string + const: mcp_approval_request + default: mcp_approval_request + additionalProperties: false + required: + - arguments + - id + - name + - server_label + - type + title: OpenAIResponseMCPApprovalRequest + description: >- + A request for human approval of a tool invocation. + OpenAIResponseMCPApprovalResponse: + type: object + properties: + approval_request_id: + type: string + approve: + type: boolean + type: + type: string + const: mcp_approval_response + default: mcp_approval_response + id: + type: string + reason: + type: string + additionalProperties: false + required: + - approval_request_id + - approve + - type + title: OpenAIResponseMCPApprovalResponse + description: A response to an MCP approval request. + OpenAIResponseMessage: + type: object + properties: + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIResponseInputMessageContent' + - type: array + items: + $ref: '#/components/schemas/OpenAIResponseOutputMessageContent' + role: + oneOf: + - type: string + const: system + - type: string + const: developer + - type: string + const: user + - type: string + const: assistant + type: + type: string + const: message + default: message + id: + type: string + status: + type: string + additionalProperties: false + required: + - content + - role + - type + title: OpenAIResponseMessage + description: >- + Corresponds to the various Message types in the Responses API. They are all + under one type because the Responses API gives them all the same "type" value, + and there is no way to tell them apart in certain scenarios. 
+ OpenAIResponseObjectWithInput: + type: object + properties: + created_at: + type: integer + description: >- + Unix timestamp when the response was created + error: + $ref: '#/components/schemas/OpenAIResponseError' + description: >- + (Optional) Error details if the response generation failed + id: + type: string + description: Unique identifier for this response + model: + type: string + description: Model identifier used for generation + object: + type: string + const: response + default: response + description: >- + Object type identifier, always "response" + output: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseOutput' + description: >- + List of generated output items (messages, tool calls, etc.) + parallel_tool_calls: + type: boolean + default: false + description: >- + Whether tool calls can be executed in parallel + previous_response_id: + type: string + description: >- + (Optional) ID of the previous response in a conversation + status: + type: string + description: >- + Current status of the response generation + temperature: + type: number + description: >- + (Optional) Sampling temperature used for generation + text: + $ref: '#/components/schemas/OpenAIResponseText' + description: >- + Text formatting configuration for the response + top_p: + type: number + description: >- + (Optional) Nucleus sampling parameter used for generation + truncation: + type: string + description: >- + (Optional) Truncation strategy applied to the response + input: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseInput' + description: >- + List of input items that led to this response + additionalProperties: false + required: + - created_at + - id + - model + - object + - output + - parallel_tool_calls + - status + - text + - input + title: OpenAIResponseObjectWithInput + description: >- + OpenAI response object extended with input context information. 
+ OpenAIResponseOutput: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseMessage' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + discriminator: + propertyName: type + mapping: + message: '#/components/schemas/OpenAIResponseMessage' + web_search_call: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + OpenAIResponseOutputMessageContent: + type: object + properties: + text: + type: string + type: + type: string + const: output_text + default: output_text + annotations: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseAnnotations' + additionalProperties: false + required: + - text + - type + - annotations + title: >- + OpenAIResponseOutputMessageContentOutputText + "OpenAIResponseOutputMessageFileSearchToolCall": + type: object + properties: + id: + type: string + description: Unique identifier for this tool call + queries: + type: array + items: + type: string + description: List of search queries executed + status: + type: string + description: >- + Current status of the file search operation + type: + type: string + const: file_search_call + default: file_search_call + description: >- + Tool call type identifier, always "file_search_call" 
+ results: + type: array + items: + type: object + properties: + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Key-value attributes associated with the file + file_id: + type: string + description: >- + Unique identifier of the file containing the result + filename: + type: string + description: Name of the file containing the result + score: + type: number + description: >- + Relevance score for this search result (between 0 and 1) + text: + type: string + description: Text content of the search result + additionalProperties: false + required: + - attributes + - file_id + - filename + - score + - text + title: >- + OpenAIResponseOutputMessageFileSearchToolCallResults + description: >- + Search results returned by the file search operation. + description: >- + (Optional) Search results returned by the file search operation + additionalProperties: false + required: + - id + - queries + - status + - type + title: >- + OpenAIResponseOutputMessageFileSearchToolCall + description: >- + File search tool call output message for OpenAI responses. 
+ "OpenAIResponseOutputMessageFunctionToolCall": + type: object + properties: + call_id: + type: string + description: Unique identifier for the function call + name: + type: string + description: Name of the function being called + arguments: + type: string + description: >- + JSON string containing the function arguments + type: + type: string + const: function_call + default: function_call + description: >- + Tool call type identifier, always "function_call" + id: + type: string + description: >- + (Optional) Additional identifier for the tool call + status: + type: string + description: >- + (Optional) Current status of the function call execution + additionalProperties: false + required: + - call_id + - name + - arguments + - type + title: >- + OpenAIResponseOutputMessageFunctionToolCall + description: >- + Function tool call output message for OpenAI responses. + OpenAIResponseOutputMessageMCPCall: + type: object + properties: + id: + type: string + description: Unique identifier for this MCP call + type: + type: string + const: mcp_call + default: mcp_call + description: >- + Tool call type identifier, always "mcp_call" + arguments: + type: string + description: >- + JSON string containing the MCP call arguments + name: + type: string + description: Name of the MCP method being called + server_label: + type: string + description: >- + Label identifying the MCP server handling the call + error: + type: string + description: >- + (Optional) Error message if the MCP call failed + output: + type: string + description: >- + (Optional) Output result from the successful MCP call + additionalProperties: false + required: + - id + - type + - arguments + - name + - server_label + title: OpenAIResponseOutputMessageMCPCall + description: >- + Model Context Protocol (MCP) call output message for OpenAI responses. 
+ OpenAIResponseOutputMessageMCPListTools: + type: object + properties: + id: + type: string + description: >- + Unique identifier for this MCP list tools operation + type: + type: string + const: mcp_list_tools + default: mcp_list_tools + description: >- + Tool call type identifier, always "mcp_list_tools" + server_label: + type: string + description: >- + Label identifying the MCP server providing the tools + tools: + type: array + items: + type: object + properties: + input_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + JSON schema defining the tool's input parameters + name: + type: string + description: Name of the tool + description: + type: string + description: >- + (Optional) Description of what the tool does + additionalProperties: false + required: + - input_schema + - name + title: MCPListToolsTool + description: >- + Tool definition returned by MCP list tools operation. + description: >- + List of available tools provided by the MCP server + additionalProperties: false + required: + - id + - type + - server_label + - tools + title: OpenAIResponseOutputMessageMCPListTools + description: >- + MCP list tools output message containing available tools from an MCP server. + "OpenAIResponseOutputMessageWebSearchToolCall": + type: object + properties: + id: + type: string + description: Unique identifier for this tool call + status: + type: string + description: >- + Current status of the web search operation + type: + type: string + const: web_search_call + default: web_search_call + description: >- + Tool call type identifier, always "web_search_call" + additionalProperties: false + required: + - id + - status + - type + title: >- + OpenAIResponseOutputMessageWebSearchToolCall + description: >- + Web search tool call output message for OpenAI responses. 
+ OpenAIResponseText: + type: object + properties: + format: + type: object + properties: + type: + oneOf: + - type: string + const: text + - type: string + const: json_schema + - type: string + const: json_object + description: >- + Must be "text", "json_schema", or "json_object" to identify the format + type + name: + type: string + description: >- + The name of the response format. Only used for json_schema. + schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The JSON schema the response should conform to. In a Python SDK, this + is often a `pydantic` model. Only used for json_schema. + description: + type: string + description: >- + (Optional) A description of the response format. Only used for json_schema. + strict: + type: boolean + description: >- + (Optional) Whether to strictly enforce the JSON schema. If true, the + response must match the schema exactly. Only used for json_schema. + additionalProperties: false + required: + - type + description: >- + (Optional) Text format configuration specifying output format requirements + additionalProperties: false + title: OpenAIResponseText + description: >- + Text response configuration for OpenAI responses. 
+ OpenAIResponseInputTool: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseInputToolWebSearch' + - $ref: '#/components/schemas/OpenAIResponseInputToolFileSearch' + - $ref: '#/components/schemas/OpenAIResponseInputToolFunction' + - $ref: '#/components/schemas/OpenAIResponseInputToolMCP' + discriminator: + propertyName: type + mapping: + web_search: '#/components/schemas/OpenAIResponseInputToolWebSearch' + file_search: '#/components/schemas/OpenAIResponseInputToolFileSearch' + function: '#/components/schemas/OpenAIResponseInputToolFunction' + mcp: '#/components/schemas/OpenAIResponseInputToolMCP' + OpenAIResponseInputToolFileSearch: + type: object + properties: + type: + type: string + const: file_search + default: file_search + description: >- + Tool type identifier, always "file_search" + vector_store_ids: + type: array + items: + type: string + description: >- + List of vector store identifiers to search within + filters: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional filters to apply to the search + max_num_results: + type: integer + default: 10 + description: >- + (Optional) Maximum number of search results to return (1-50) + ranking_options: + type: object + properties: + ranker: + type: string + description: >- + (Optional) Name of the ranking algorithm to use + score_threshold: + type: number + default: 0.0 + description: >- + (Optional) Minimum relevance score threshold for results + additionalProperties: false + description: >- + (Optional) Options for ranking and scoring search results + additionalProperties: false + required: + - type + - vector_store_ids + title: OpenAIResponseInputToolFileSearch + description: >- + File search tool configuration for OpenAI response inputs. 
+ OpenAIResponseInputToolFunction: + type: object + properties: + type: + type: string + const: function + default: function + description: Tool type identifier, always "function" + name: + type: string + description: Name of the function that can be called + description: + type: string + description: >- + (Optional) Description of what the function does + parameters: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) JSON schema defining the function's parameters + strict: + type: boolean + description: >- + (Optional) Whether to enforce strict parameter validation + additionalProperties: false + required: + - type + - name + title: OpenAIResponseInputToolFunction + description: >- + Function tool configuration for OpenAI response inputs. + OpenAIResponseInputToolMCP: + type: object + properties: + type: + type: string + const: mcp + default: mcp + description: Tool type identifier, always "mcp" + server_label: + type: string + description: Label to identify this MCP server + server_url: + type: string + description: URL endpoint of the MCP server + headers: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) HTTP headers to include when connecting to the server + require_approval: + oneOf: + - type: string + const: always + - type: string + const: never + - type: object + properties: + always: + type: array + items: + type: string + description: >- + (Optional) List of tool names that always require approval + never: + type: array + items: + type: string + description: >- + (Optional) List of tool names that never require approval + additionalProperties: false + title: ApprovalFilter + description: >- + Filter configuration for MCP tool approval requirements. 
+ default: never + description: >- + Approval requirement for tool calls ("always", "never", or filter) + allowed_tools: + oneOf: + - type: array + items: + type: string + - type: object + properties: + tool_names: + type: array + items: + type: string + description: >- + (Optional) List of specific tool names that are allowed + additionalProperties: false + title: AllowedToolsFilter + description: >- + Filter configuration for restricting which MCP tools can be used. + description: >- + (Optional) Restriction on which tools can be used from this server + additionalProperties: false + required: + - type + - server_label + - server_url + - require_approval + title: OpenAIResponseInputToolMCP + description: >- + Model Context Protocol (MCP) tool configuration for OpenAI response inputs. + OpenAIResponseInputToolWebSearch: + type: object + properties: + type: + oneOf: + - type: string + const: web_search + - type: string + const: web_search_preview + - type: string + const: web_search_preview_2025_03_11 + default: web_search + description: Web search tool type variant to use + search_context_size: + type: string + default: medium + description: >- + (Optional) Size of search context, must be "low", "medium", or "high" + additionalProperties: false + required: + - type + title: OpenAIResponseInputToolWebSearch + description: >- + Web search tool configuration for OpenAI response inputs. + CreateOpenaiResponseRequest: + type: object + properties: + input: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIResponseInput' + description: Input message(s) to create the response. + model: + type: string + description: The underlying LLM used for completions. + instructions: + type: string + previous_response_id: + type: string + description: >- + (Optional) if specified, the new response will be a continuation of the + previous response. This can be used to easily fork-off new responses from + existing responses. 
+ store: + type: boolean + stream: + type: boolean + temperature: + type: number + text: + $ref: '#/components/schemas/OpenAIResponseText' + tools: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseInputTool' + include: + type: array + items: + type: string + description: >- + (Optional) Additional fields to include in the response. + max_infer_iters: + type: integer + additionalProperties: false + required: + - input + - model + title: CreateOpenaiResponseRequest + OpenAIResponseObject: + type: object + properties: + created_at: + type: integer + description: >- + Unix timestamp when the response was created + error: + $ref: '#/components/schemas/OpenAIResponseError' + description: >- + (Optional) Error details if the response generation failed + id: + type: string + description: Unique identifier for this response + model: + type: string + description: Model identifier used for generation + object: + type: string + const: response + default: response + description: >- + Object type identifier, always "response" + output: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseOutput' + description: >- + List of generated output items (messages, tool calls, etc.) 
+ parallel_tool_calls: + type: boolean + default: false + description: >- + Whether tool calls can be executed in parallel + previous_response_id: + type: string + description: >- + (Optional) ID of the previous response in a conversation + status: + type: string + description: >- + Current status of the response generation + temperature: + type: number + description: >- + (Optional) Sampling temperature used for generation + text: + $ref: '#/components/schemas/OpenAIResponseText' + description: >- + Text formatting configuration for the response + top_p: + type: number + description: >- + (Optional) Nucleus sampling parameter used for generation + truncation: + type: string + description: >- + (Optional) Truncation strategy applied to the response + additionalProperties: false + required: + - created_at + - id + - model + - object + - output + - parallel_tool_calls + - status + - text + title: OpenAIResponseObject + description: >- + Complete OpenAI response object containing generation results and metadata. 
+ OpenAIResponseContentPartOutputText: + type: object + properties: + type: + type: string + const: output_text + default: output_text + text: + type: string + additionalProperties: false + required: + - type + - text + title: OpenAIResponseContentPartOutputText + OpenAIResponseContentPartRefusal: + type: object + properties: + type: + type: string + const: refusal + default: refusal + refusal: + type: string + additionalProperties: false + required: + - type + - refusal + title: OpenAIResponseContentPartRefusal + OpenAIResponseObjectStream: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseCreated' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemAdded' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDelta' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallInProgress' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallSearching' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallCompleted' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsInProgress' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsFailed' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsCompleted' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallInProgress' + - $ref: 
'#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallFailed' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallCompleted' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartAdded' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseCompleted' + discriminator: + propertyName: type + mapping: + response.created: '#/components/schemas/OpenAIResponseObjectStreamResponseCreated' + response.output_item.added: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemAdded' + response.output_item.done: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemDone' + response.output_text.delta: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDelta' + response.output_text.done: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDone' + response.function_call_arguments.delta: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta' + response.function_call_arguments.done: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone' + response.web_search_call.in_progress: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallInProgress' + response.web_search_call.searching: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallSearching' + response.web_search_call.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallCompleted' + response.mcp_list_tools.in_progress: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsInProgress' + response.mcp_list_tools.failed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsFailed' + response.mcp_list_tools.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsCompleted' + response.mcp_call.arguments.delta: 
'#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta' + response.mcp_call.arguments.done: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDone' + response.mcp_call.in_progress: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallInProgress' + response.mcp_call.failed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallFailed' + response.mcp_call.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallCompleted' + response.content_part.added: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartAdded' + response.content_part.done: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartDone' + response.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseCompleted' + "OpenAIResponseObjectStreamResponseCompleted": + type: object + properties: + response: + $ref: '#/components/schemas/OpenAIResponseObject' + description: The completed response object + type: + type: string + const: response.completed + default: response.completed + description: >- + Event type identifier, always "response.completed" + additionalProperties: false + required: + - response + - type + title: >- + OpenAIResponseObjectStreamResponseCompleted + description: >- + Streaming event indicating a response has been completed. 
+ "OpenAIResponseObjectStreamResponseContentPartAdded": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this content + item_id: + type: string + description: >- + Unique identifier of the output item containing this content part + part: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseContentPartOutputText' + - $ref: '#/components/schemas/OpenAIResponseContentPartRefusal' + discriminator: + propertyName: type + mapping: + output_text: '#/components/schemas/OpenAIResponseContentPartOutputText' + refusal: '#/components/schemas/OpenAIResponseContentPartRefusal' + description: The content part that was added + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.content_part.added + default: response.content_part.added + description: >- + Event type identifier, always "response.content_part.added" + additionalProperties: false + required: + - response_id + - item_id + - part + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseContentPartAdded + description: >- + Streaming event for when a new content part is added to a response item. 
+ "OpenAIResponseObjectStreamResponseContentPartDone": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this content + item_id: + type: string + description: >- + Unique identifier of the output item containing this content part + part: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseContentPartOutputText' + - $ref: '#/components/schemas/OpenAIResponseContentPartRefusal' + discriminator: + propertyName: type + mapping: + output_text: '#/components/schemas/OpenAIResponseContentPartOutputText' + refusal: '#/components/schemas/OpenAIResponseContentPartRefusal' + description: The completed content part + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.content_part.done + default: response.content_part.done + description: >- + Event type identifier, always "response.content_part.done" + additionalProperties: false + required: + - response_id + - item_id + - part + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseContentPartDone + description: >- + Streaming event for when a content part is completed. + "OpenAIResponseObjectStreamResponseCreated": + type: object + properties: + response: + $ref: '#/components/schemas/OpenAIResponseObject' + description: The newly created response object + type: + type: string + const: response.created + default: response.created + description: >- + Event type identifier, always "response.created" + additionalProperties: false + required: + - response + - type + title: >- + OpenAIResponseObjectStreamResponseCreated + description: >- + Streaming event indicating a new response has been created. 
+ "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta": + type: object + properties: + delta: + type: string + description: >- + Incremental function call arguments being added + item_id: + type: string + description: >- + Unique identifier of the function call being updated + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.function_call_arguments.delta + default: response.function_call_arguments.delta + description: >- + Event type identifier, always "response.function_call_arguments.delta" + additionalProperties: false + required: + - delta + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta + description: >- + Streaming event for incremental function call argument updates. + "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone": + type: object + properties: + arguments: + type: string + description: >- + Final complete arguments JSON string for the function call + item_id: + type: string + description: >- + Unique identifier of the completed function call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.function_call_arguments.done + default: response.function_call_arguments.done + description: >- + Event type identifier, always "response.function_call_arguments.done" + additionalProperties: false + required: + - arguments + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone + description: >- + Streaming event for when function call arguments are completed. 
+ "OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta": + type: object + properties: + delta: + type: string + item_id: + type: string + output_index: + type: integer + sequence_number: + type: integer + type: + type: string + const: response.mcp_call.arguments.delta + default: response.mcp_call.arguments.delta + additionalProperties: false + required: + - delta + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta + "OpenAIResponseObjectStreamResponseMcpCallArgumentsDone": + type: object + properties: + arguments: + type: string + item_id: + type: string + output_index: + type: integer + sequence_number: + type: integer + type: + type: string + const: response.mcp_call.arguments.done + default: response.mcp_call.arguments.done + additionalProperties: false + required: + - arguments + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallArgumentsDone + "OpenAIResponseObjectStreamResponseMcpCallCompleted": + type: object + properties: + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.mcp_call.completed + default: response.mcp_call.completed + description: >- + Event type identifier, always "response.mcp_call.completed" + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallCompleted + description: Streaming event for completed MCP calls. 
+ "OpenAIResponseObjectStreamResponseMcpCallFailed": + type: object + properties: + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.mcp_call.failed + default: response.mcp_call.failed + description: >- + Event type identifier, always "response.mcp_call.failed" + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallFailed + description: Streaming event for failed MCP calls. + "OpenAIResponseObjectStreamResponseMcpCallInProgress": + type: object + properties: + item_id: + type: string + description: Unique identifier of the MCP call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.mcp_call.in_progress + default: response.mcp_call.in_progress + description: >- + Event type identifier, always "response.mcp_call.in_progress" + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallInProgress + description: >- + Streaming event for MCP calls in progress. 
+ "OpenAIResponseObjectStreamResponseMcpListToolsCompleted": + type: object + properties: + sequence_number: + type: integer + type: + type: string + const: response.mcp_list_tools.completed + default: response.mcp_list_tools.completed + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpListToolsCompleted + "OpenAIResponseObjectStreamResponseMcpListToolsFailed": + type: object + properties: + sequence_number: + type: integer + type: + type: string + const: response.mcp_list_tools.failed + default: response.mcp_list_tools.failed + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpListToolsFailed + "OpenAIResponseObjectStreamResponseMcpListToolsInProgress": + type: object + properties: + sequence_number: + type: integer + type: + type: string + const: response.mcp_list_tools.in_progress + default: response.mcp_list_tools.in_progress + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpListToolsInProgress + "OpenAIResponseObjectStreamResponseOutputItemAdded": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this output + item: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseMessage' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + discriminator: + propertyName: type + mapping: + message: '#/components/schemas/OpenAIResponseMessage' + web_search_call: 
'#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + description: >- + The output item that was added (message, tool call, etc.) + output_index: + type: integer + description: >- + Index position of this item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_item.added + default: response.output_item.added + description: >- + Event type identifier, always "response.output_item.added" + additionalProperties: false + required: + - response_id + - item + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputItemAdded + description: >- + Streaming event for when a new output item is added to the response. 
+ "OpenAIResponseObjectStreamResponseOutputItemDone": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this output + item: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseMessage' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + discriminator: + propertyName: type + mapping: + message: '#/components/schemas/OpenAIResponseMessage' + web_search_call: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + description: >- + The completed output item (message, tool call, etc.) + output_index: + type: integer + description: >- + Index position of this item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_item.done + default: response.output_item.done + description: >- + Event type identifier, always "response.output_item.done" + additionalProperties: false + required: + - response_id + - item + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputItemDone + description: >- + Streaming event for when an output item is completed. 
+ "OpenAIResponseObjectStreamResponseOutputTextDelta": + type: object + properties: + content_index: + type: integer + description: Index position within the text content + delta: + type: string + description: Incremental text content being added + item_id: + type: string + description: >- + Unique identifier of the output item being updated + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_text.delta + default: response.output_text.delta + description: >- + Event type identifier, always "response.output_text.delta" + additionalProperties: false + required: + - content_index + - delta + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputTextDelta + description: >- + Streaming event for incremental text content updates. + "OpenAIResponseObjectStreamResponseOutputTextDone": + type: object + properties: + content_index: + type: integer + description: Index position within the text content + text: + type: string + description: >- + Final complete text content of the output item + item_id: + type: string + description: >- + Unique identifier of the completed output item + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_text.done + default: response.output_text.done + description: >- + Event type identifier, always "response.output_text.done" + additionalProperties: false + required: + - content_index + - text + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputTextDone + description: >- + Streaming event for when text output is completed. 
+ "OpenAIResponseObjectStreamResponseWebSearchCallCompleted": + type: object + properties: + item_id: + type: string + description: >- + Unique identifier of the completed web search call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.web_search_call.completed + default: response.web_search_call.completed + description: >- + Event type identifier, always "response.web_search_call.completed" + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseWebSearchCallCompleted + description: >- + Streaming event for completed web search calls. + "OpenAIResponseObjectStreamResponseWebSearchCallInProgress": + type: object + properties: + item_id: + type: string + description: Unique identifier of the web search call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.web_search_call.in_progress + default: response.web_search_call.in_progress + description: >- + Event type identifier, always "response.web_search_call.in_progress" + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseWebSearchCallInProgress + description: >- + Streaming event for web search calls in progress. 
+ "OpenAIResponseObjectStreamResponseWebSearchCallSearching": + type: object + properties: + item_id: + type: string + output_index: + type: integer + sequence_number: + type: integer + type: + type: string + const: response.web_search_call.searching + default: response.web_search_call.searching + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseWebSearchCallSearching + ListOpenaiResponsesRequest: + type: object + properties: + after: + type: string + description: The ID of the last response to return. + limit: + type: integer + description: The number of responses to return. + model: + type: string + description: The model to filter responses by. + order: + type: string + enum: + - asc + - desc + description: >- + The order to sort responses by when sorted by created_at ('asc' or 'desc'). + additionalProperties: false + title: ListOpenaiResponsesRequest + OpenAIDeleteResponseObject: + type: object + properties: + id: + type: string + description: >- + Unique identifier of the deleted response + object: + type: string + const: response + default: response + description: >- + Object type identifier, always "response" + deleted: + type: boolean + default: true + description: Deletion confirmation flag, always True + additionalProperties: false + required: + - id + - object + - deleted + title: OpenAIDeleteResponseObject + description: >- + Response object confirming deletion of an OpenAI response. + ListOpenAIResponseInputItem: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseInput' + description: List of input items + object: + type: string + const: list + default: list + description: Object type identifier, always "list" + additionalProperties: false + required: + - data + - object + title: ListOpenAIResponseInputItem + description: >- + List container for OpenAI response input items. 
+ VectorStoreFileCounts: + type: object + properties: + completed: + type: integer + description: >- + Number of files that have been successfully processed + cancelled: + type: integer + description: >- + Number of files that had their processing cancelled + failed: + type: integer + description: Number of files that failed to process + in_progress: + type: integer + description: >- + Number of files currently being processed + total: + type: integer + description: >- + Total number of files in the vector store + additionalProperties: false + required: + - completed + - cancelled + - failed + - in_progress + - total + title: VectorStoreFileCounts + description: >- + File processing status counts for a vector store. + VectorStoreListResponse: + type: object + properties: + object: + type: string + default: list + description: Object type identifier, always "list" + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreObject' + description: List of vector store objects + first_id: + type: string + description: >- + (Optional) ID of the first vector store in the list for pagination + last_id: + type: string + description: >- + (Optional) ID of the last vector store in the list for pagination + has_more: + type: boolean + default: false + description: >- + Whether there are more vector stores available beyond this page + additionalProperties: false + required: + - object + - data + - has_more + title: VectorStoreListResponse + description: Response from listing vector stores. 
+ VectorStoreObject: + type: object + properties: + id: + type: string + description: Unique identifier for the vector store + object: + type: string + default: vector_store + description: >- + Object type identifier, always "vector_store" + created_at: + type: integer + description: >- + Timestamp when the vector store was created + name: + type: string + description: (Optional) Name of the vector store + usage_bytes: + type: integer + default: 0 + description: >- + Storage space used by the vector store in bytes + file_counts: + $ref: '#/components/schemas/VectorStoreFileCounts' + description: >- + File processing status counts for the vector store + status: + type: string + default: completed + description: Current status of the vector store + expires_after: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Expiration policy for the vector store + expires_at: + type: integer + description: >- + (Optional) Timestamp when the vector store will expire + last_active_at: + type: integer + description: >- + (Optional) Timestamp of last activity on the vector store + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Set of key-value pairs that can be attached to the vector store + additionalProperties: false + required: + - id + - object + - created_at + - usage_bytes + - file_counts + - status + - metadata + title: VectorStoreObject + description: OpenAI Vector Store object. + OpenaiCreateVectorStoreRequest: + type: object + properties: + name: + type: string + description: A name for the vector store. + file_ids: + type: array + items: + type: string + description: >- + A list of File IDs that the vector store should use. Useful for tools + like `file_search` that can access files. 
+ expires_after: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The expiration policy for a vector store. + chunking_strategy: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The chunking strategy used to chunk the file(s). If not set, will use + the `auto` strategy. + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Set of 16 key-value pairs that can be attached to an object. + embedding_model: + type: string + description: >- + The embedding model to use for this vector store. + embedding_dimension: + type: integer + description: >- + The dimension of the embedding vectors (default: 384). + provider_id: + type: string + description: >- + The ID of the provider to use for this vector store. + additionalProperties: false + title: OpenaiCreateVectorStoreRequest + OpenaiUpdateVectorStoreRequest: + type: object + properties: + name: + type: string + description: The name of the vector store. + expires_after: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The expiration policy for a vector store. + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Set of 16 key-value pairs that can be attached to an object. 
+ additionalProperties: false + title: OpenaiUpdateVectorStoreRequest + VectorStoreDeleteResponse: + type: object + properties: + id: + type: string + description: >- + Unique identifier of the deleted vector store + object: + type: string + default: vector_store.deleted + description: >- + Object type identifier for the deletion response + deleted: + type: boolean + default: true + description: >- + Whether the deletion operation was successful + additionalProperties: false + required: + - id + - object + - deleted + title: VectorStoreDeleteResponse + description: Response from deleting a vector store. + VectorStoreChunkingStrategy: + oneOf: + - $ref: '#/components/schemas/VectorStoreChunkingStrategyAuto' + - $ref: '#/components/schemas/VectorStoreChunkingStrategyStatic' + discriminator: + propertyName: type + mapping: + auto: '#/components/schemas/VectorStoreChunkingStrategyAuto' + static: '#/components/schemas/VectorStoreChunkingStrategyStatic' + VectorStoreChunkingStrategyAuto: + type: object + properties: + type: + type: string + const: auto + default: auto + description: >- + Strategy type, always "auto" for automatic chunking + additionalProperties: false + required: + - type + title: VectorStoreChunkingStrategyAuto + description: >- + Automatic chunking strategy for vector store files. + VectorStoreChunkingStrategyStatic: + type: object + properties: + type: + type: string + const: static + default: static + description: >- + Strategy type, always "static" for static chunking + static: + $ref: '#/components/schemas/VectorStoreChunkingStrategyStaticConfig' + description: >- + Configuration parameters for the static chunking strategy + additionalProperties: false + required: + - type + - static + title: VectorStoreChunkingStrategyStatic + description: >- + Static chunking strategy with configurable parameters. 
+ VectorStoreChunkingStrategyStaticConfig: + type: object + properties: + chunk_overlap_tokens: + type: integer + default: 400 + description: >- + Number of tokens to overlap between adjacent chunks + max_chunk_size_tokens: + type: integer + default: 800 + description: >- + Maximum number of tokens per chunk, must be between 100 and 4096 + additionalProperties: false + required: + - chunk_overlap_tokens + - max_chunk_size_tokens + title: VectorStoreChunkingStrategyStaticConfig + description: >- + Configuration for static chunking strategy. + OpenaiCreateVectorStoreFileBatchRequest: + type: object + properties: + file_ids: + type: array + items: + type: string + description: >- + A list of File IDs that the vector store should use. + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Key-value attributes to store with the files. + chunking_strategy: + $ref: '#/components/schemas/VectorStoreChunkingStrategy' + description: >- + (Optional) The chunking strategy used to chunk the file(s). Defaults to + auto. 
+ additionalProperties: false + required: + - file_ids + title: OpenaiCreateVectorStoreFileBatchRequest + VectorStoreFileBatchObject: + type: object + properties: + id: + type: string + description: Unique identifier for the file batch + object: + type: string + default: vector_store.file_batch + description: >- + Object type identifier, always "vector_store.file_batch" + created_at: + type: integer + description: >- + Timestamp when the file batch was created + vector_store_id: + type: string + description: >- + ID of the vector store containing the file batch + status: + $ref: '#/components/schemas/VectorStoreFileStatus' + description: >- + Current processing status of the file batch + file_counts: + $ref: '#/components/schemas/VectorStoreFileCounts' + description: >- + File processing status counts for the batch + additionalProperties: false + required: + - id + - object + - created_at + - vector_store_id + - status + - file_counts + title: VectorStoreFileBatchObject + description: OpenAI Vector Store File Batch object. + VectorStoreFileStatus: + oneOf: + - type: string + const: completed + - type: string + const: in_progress + - type: string + const: cancelled + - type: string + const: failed + VectorStoreFileLastError: + type: object + properties: + code: + oneOf: + - type: string + const: server_error + - type: string + const: rate_limit_exceeded + description: >- + Error code indicating the type of failure + message: + type: string + description: >- + Human-readable error message describing the failure + additionalProperties: false + required: + - code + - message + title: VectorStoreFileLastError + description: >- + Error information for failed vector store file processing. 
+ VectorStoreFileObject: + type: object + properties: + id: + type: string + description: Unique identifier for the file + object: + type: string + default: vector_store.file + description: >- + Object type identifier, always "vector_store.file" + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Key-value attributes associated with the file + chunking_strategy: + oneOf: + - $ref: '#/components/schemas/VectorStoreChunkingStrategyAuto' + - $ref: '#/components/schemas/VectorStoreChunkingStrategyStatic' + discriminator: + propertyName: type + mapping: + auto: '#/components/schemas/VectorStoreChunkingStrategyAuto' + static: '#/components/schemas/VectorStoreChunkingStrategyStatic' + description: >- + Strategy used for splitting the file into chunks + created_at: + type: integer + description: >- + Timestamp when the file was added to the vector store + last_error: + $ref: '#/components/schemas/VectorStoreFileLastError' + description: >- + (Optional) Error information if file processing failed + status: + $ref: '#/components/schemas/VectorStoreFileStatus' + description: Current processing status of the file + usage_bytes: + type: integer + default: 0 + description: Storage space used by this file in bytes + vector_store_id: + type: string + description: >- + ID of the vector store containing this file + additionalProperties: false + required: + - id + - object + - attributes + - chunking_strategy + - created_at + - status + - usage_bytes + - vector_store_id + title: VectorStoreFileObject + description: OpenAI Vector Store File object. 
+ VectorStoreFilesListInBatchResponse: + type: object + properties: + object: + type: string + default: list + description: Object type identifier, always "list" + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreFileObject' + description: >- + List of vector store file objects in the batch + first_id: + type: string + description: >- + (Optional) ID of the first file in the list for pagination + last_id: + type: string + description: >- + (Optional) ID of the last file in the list for pagination + has_more: + type: boolean + default: false + description: >- + Whether there are more files available beyond this page + additionalProperties: false + required: + - object + - data + - has_more + title: VectorStoreFilesListInBatchResponse + description: >- + Response from listing files in a vector store file batch. + VectorStoreListFilesResponse: + type: object + properties: + object: + type: string + default: list + description: Object type identifier, always "list" + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreFileObject' + description: List of vector store file objects + first_id: + type: string + description: >- + (Optional) ID of the first file in the list for pagination + last_id: + type: string + description: >- + (Optional) ID of the last file in the list for pagination + has_more: + type: boolean + default: false + description: >- + Whether there are more files available beyond this page + additionalProperties: false + required: + - object + - data + - has_more + title: VectorStoreListFilesResponse + description: >- + Response from listing files in a vector store. + OpenaiAttachFileToVectorStoreRequest: + type: object + properties: + file_id: + type: string + description: >- + The ID of the file to attach to the vector store. 
+ attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The key-value attributes stored with the file, which can be used for filtering. + chunking_strategy: + $ref: '#/components/schemas/VectorStoreChunkingStrategy' + description: >- + The chunking strategy to use for the file. + additionalProperties: false + required: + - file_id + title: OpenaiAttachFileToVectorStoreRequest + OpenaiUpdateVectorStoreFileRequest: + type: object + properties: + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The updated key-value attributes to store with the file. + additionalProperties: false + required: + - attributes + title: OpenaiUpdateVectorStoreFileRequest + VectorStoreFileDeleteResponse: + type: object + properties: + id: + type: string + description: Unique identifier of the deleted file + object: + type: string + default: vector_store.file.deleted + description: >- + Object type identifier for the deletion response + deleted: + type: boolean + default: true + description: >- + Whether the deletion operation was successful + additionalProperties: false + required: + - id + - object + - deleted + title: VectorStoreFileDeleteResponse + description: >- + Response from deleting a vector store file. + VectorStoreContent: + type: object + properties: + type: + type: string + const: text + description: >- + Content type, currently only "text" is supported + text: + type: string + description: The actual text content + additionalProperties: false + required: + - type + - text + title: VectorStoreContent + description: >- + Content item from a vector store file or search result. 
+ VectorStoreFileContentsResponse: + type: object + properties: + file_id: + type: string + description: Unique identifier for the file + filename: + type: string + description: Name of the file + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Key-value attributes associated with the file + content: + type: array + items: + $ref: '#/components/schemas/VectorStoreContent' + description: List of content items from the file + additionalProperties: false + required: + - file_id + - filename + - attributes + - content + title: VectorStoreFileContentsResponse + description: >- + Response from retrieving the contents of a vector store file. + OpenaiSearchVectorStoreRequest: + type: object + properties: + query: + oneOf: + - type: string + - type: array + items: + type: string + description: >- + The query string or array for performing the search. + filters: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Filters based on file attributes to narrow the search results. + max_num_results: + type: integer + description: >- + Maximum number of results to return (1 to 50 inclusive, default 10). + ranking_options: + type: object + properties: + ranker: + type: string + description: >- + (Optional) Name of the ranking algorithm to use + score_threshold: + type: number + default: 0.0 + description: >- + (Optional) Minimum relevance score threshold for results + additionalProperties: false + description: >- + Ranking options for fine-tuning the search results. 
+ rewrite_query: + type: boolean + description: >- + Whether to rewrite the natural language query for vector search (default + false) + search_mode: + type: string + description: >- + The search mode to use - "keyword", "vector", or "hybrid" (default "vector") + additionalProperties: false + required: + - query + title: OpenaiSearchVectorStoreRequest + VectorStoreSearchResponse: + type: object + properties: + file_id: + type: string + description: >- + Unique identifier of the file containing the result + filename: + type: string + description: Name of the file containing the result + score: + type: number + description: Relevance score for this search result + attributes: + type: object + additionalProperties: + oneOf: + - type: string + - type: number + - type: boolean + description: >- + (Optional) Key-value attributes associated with the file + content: + type: array + items: + $ref: '#/components/schemas/VectorStoreContent' + description: >- + List of content items matching the search query + additionalProperties: false + required: + - file_id + - filename + - score + - content + title: VectorStoreSearchResponse + description: Response from searching a vector store. 
+ VectorStoreSearchResponsePage: + type: object + properties: + object: + type: string + default: vector_store.search_results.page + description: >- + Object type identifier for the search results page + search_query: + type: string + description: >- + The original search query that was executed + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreSearchResponse' + description: List of search result objects + has_more: + type: boolean + default: false + description: >- + Whether there are more results available beyond this page + next_page: + type: string + description: >- + (Optional) Token for retrieving the next page of results + additionalProperties: false + required: + - object + - search_query + - data + - has_more + title: VectorStoreSearchResponsePage + description: >- + Paginated response from searching a vector store. Checkpoint: type: object properties: @@ -4643,10 +10053,30 @@ tags: description: '' x-displayName: >- Llama Stack Evaluation API for running evaluations on model and agent candidates. + - name: Files + description: '' + - name: Inference + description: >- + This API provides the raw interface to the underlying models. Two kinds of models + are supported: + + - LLM models: these models generate "raw" and "chat" (conversational) completions. + + - Embedding models: these models generate embeddings to be used for semantic + search. + x-displayName: >- + Llama Stack Inference API for generating completions, chat completions, and + embeddings. 
+ - name: Models + description: '' - name: PostTraining (Coming Soon) description: '' + - name: Safety + description: '' - name: Telemetry description: '' + - name: VectorIO + description: '' x-tagGroups: - name: Operations tags: @@ -4655,5 +10085,10 @@ x-tagGroups: - DatasetIO - Datasets - Eval + - Files + - Inference + - Models - PostTraining (Coming Soon) + - Safety - Telemetry + - VectorIO diff --git a/docs/static/llama-stack-spec.html b/docs/static/llama-stack-spec.html index fa16e62ee..3da721a4e 100644 --- a/docs/static/llama-stack-spec.html +++ b/docs/static/llama-stack-spec.html @@ -1310,16 +1310,11 @@ "post": { "responses": { "200": { - "description": "An OpenAIResponseObject.", + "description": "A ListOpenAIResponseObject.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/OpenAIResponseObject" - } - }, - "text/event-stream": { - "schema": { - "$ref": "#/components/schemas/OpenAIResponseObjectStream" + "$ref": "#/components/schemas/ListOpenAIResponseObject" } } } @@ -1340,14 +1335,14 @@ "tags": [ "Agents" ], - "summary": "Create a new OpenAI response.", - "description": "Create a new OpenAI response.", + "summary": "List all OpenAI responses.", + "description": "List all OpenAI responses.", "parameters": [], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateOpenaiResponseRequest" + "$ref": "#/components/schemas/ListOpenaiResponsesRequest" } } }, @@ -8238,6 +8233,33 @@ ], "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching" }, + "ListOpenaiResponsesRequest": { + "type": "object", + "properties": { + "after": { + "type": "string", + "description": "The ID of the last response to return." + }, + "limit": { + "type": "integer", + "description": "The number of responses to return." + }, + "model": { + "type": "string", + "description": "The model to filter responses by." 
+ }, + "order": { + "type": "string", + "enum": [ + "asc", + "desc" + ], + "description": "The order to sort responses by when sorted by created_at ('asc' or 'desc')." + } + }, + "additionalProperties": false, + "title": "ListOpenaiResponsesRequest" + }, "OpenAIDeleteResponseObject": { "type": "object", "properties": { diff --git a/docs/static/llama-stack-spec.yaml b/docs/static/llama-stack-spec.yaml index 733e2cd21..3927d3a94 100644 --- a/docs/static/llama-stack-spec.yaml +++ b/docs/static/llama-stack-spec.yaml @@ -967,14 +967,11 @@ paths: post: responses: '200': - description: An OpenAIResponseObject. + description: A ListOpenAIResponseObject. content: application/json: schema: - $ref: '#/components/schemas/OpenAIResponseObject' - text/event-stream: - schema: - $ref: '#/components/schemas/OpenAIResponseObjectStream' + $ref: '#/components/schemas/ListOpenAIResponseObject' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -987,14 +984,14 @@ paths: $ref: '#/components/responses/DefaultError' tags: - Agents - summary: Create a new OpenAI response. - description: Create a new OpenAI response. + summary: List all OpenAI responses. + description: List all OpenAI responses. parameters: [] requestBody: content: application/json: schema: - $ref: '#/components/schemas/CreateOpenaiResponseRequest' + $ref: '#/components/schemas/ListOpenaiResponsesRequest' required: true deprecated: false /v1/responses/{response_id}: @@ -6199,6 +6196,27 @@ components: - type title: >- OpenAIResponseObjectStreamResponseWebSearchCallSearching + ListOpenaiResponsesRequest: + type: object + properties: + after: + type: string + description: The ID of the last response to return. + limit: + type: integer + description: The number of responses to return. + model: + type: string + description: The model to filter responses by. + order: + type: string + enum: + - asc + - desc + description: >- + The order to sort responses by when sorted by created_at ('asc' or 'desc'). 
+ additionalProperties: false + title: ListOpenaiResponsesRequest OpenAIDeleteResponseObject: type: object properties: diff --git a/llama_stack/apis/agents/agents.py b/llama_stack/apis/agents/agents.py index 97d80af59..dcd0d83d2 100644 --- a/llama_stack/apis/agents/agents.py +++ b/llama_stack/apis/agents/agents.py @@ -772,6 +772,12 @@ class Agents(Protocol): # # Both of these APIs are inherently stateful. + @webmethod( + route="/openai/v1/responses/{response_id}", + method="GET", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod(route="/responses/{response_id}", method="GET", level=LLAMA_STACK_API_V1) async def get_openai_response( self, @@ -784,6 +790,7 @@ class Agents(Protocol): """ ... + @webmethod(route="/openai/v1/responses", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/responses", method="POST", level=LLAMA_STACK_API_V1) async def create_openai_response( self, @@ -809,6 +816,7 @@ class Agents(Protocol): """ ... + @webmethod(route="/openai/v1/responses", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/responses", method="GET", level=LLAMA_STACK_API_V1) async def list_openai_responses( self, @@ -828,10 +836,9 @@ class Agents(Protocol): ... @webmethod( - route="/responses/{response_id}/input_items", - method="GET", - level=LLAMA_STACK_API_V1, + route="/openai/v1/responses/{response_id}/input_items", method="GET", level=LLAMA_STACK_API_V1, deprecated=True ) + @webmethod(route="/responses/{response_id}/input_items", method="GET", level=LLAMA_STACK_API_V1) async def list_openai_response_input_items( self, response_id: str, @@ -853,6 +860,7 @@ class Agents(Protocol): """ ... 
+ @webmethod(route="/openai/v1/responses/{response_id}", method="DELETE", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/responses/{response_id}", method="DELETE", level=LLAMA_STACK_API_V1) async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject: """Delete an OpenAI response by its ID. diff --git a/llama_stack/apis/batches/batches.py b/llama_stack/apis/batches/batches.py index 1ee9fdb15..2801fa658 100644 --- a/llama_stack/apis/batches/batches.py +++ b/llama_stack/apis/batches/batches.py @@ -43,6 +43,7 @@ class Batches(Protocol): Note: This API is currently under active development and may undergo changes. """ + @webmethod(route="/openai/v1/batches", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/batches", method="POST", level=LLAMA_STACK_API_V1) async def create_batch( self, @@ -63,6 +64,7 @@ class Batches(Protocol): """ ... + @webmethod(route="/openai/v1/batches/{batch_id}", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/batches/{batch_id}", method="GET", level=LLAMA_STACK_API_V1) async def retrieve_batch(self, batch_id: str) -> BatchObject: """Retrieve information about a specific batch. @@ -72,6 +74,7 @@ class Batches(Protocol): """ ... + @webmethod(route="/openai/v1/batches/{batch_id}/cancel", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/batches/{batch_id}/cancel", method="POST", level=LLAMA_STACK_API_V1) async def cancel_batch(self, batch_id: str) -> BatchObject: """Cancel a batch that is in progress. @@ -81,6 +84,7 @@ class Batches(Protocol): """ ... 
+ @webmethod(route="/openai/v1/batches", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/batches", method="GET", level=LLAMA_STACK_API_V1) async def list_batches( self, diff --git a/llama_stack/apis/files/files.py b/llama_stack/apis/files/files.py index 0cc491fae..13f0e95fa 100644 --- a/llama_stack/apis/files/files.py +++ b/llama_stack/apis/files/files.py @@ -105,6 +105,7 @@ class OpenAIFileDeleteResponse(BaseModel): @trace_protocol class Files(Protocol): # OpenAI Files API Endpoints + @webmethod(route="/openai/v1/files", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/files", method="POST", level=LLAMA_STACK_API_V1) async def openai_upload_file( self, @@ -127,6 +128,7 @@ class Files(Protocol): """ ... + @webmethod(route="/openai/v1/files", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/files", method="GET", level=LLAMA_STACK_API_V1) async def openai_list_files( self, @@ -146,6 +148,7 @@ class Files(Protocol): """ ... + @webmethod(route="/openai/v1/files/{file_id}", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/files/{file_id}", method="GET", level=LLAMA_STACK_API_V1) async def openai_retrieve_file( self, @@ -159,6 +162,7 @@ class Files(Protocol): """ ... + @webmethod(route="/openai/v1/files/{file_id}", method="DELETE", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/files/{file_id}", method="DELETE", level=LLAMA_STACK_API_V1) async def openai_delete_file( self, @@ -172,6 +176,7 @@ class Files(Protocol): """ ... 
+ @webmethod(route="/openai/v1/files/{file_id}/content", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/files/{file_id}/content", method="GET", level=LLAMA_STACK_API_V1) async def openai_retrieve_file_content( self, diff --git a/llama_stack/apis/inference/inference.py b/llama_stack/apis/inference/inference.py index 5525e4597..d71aea38e 100644 --- a/llama_stack/apis/inference/inference.py +++ b/llama_stack/apis/inference/inference.py @@ -1066,6 +1066,7 @@ class InferenceProvider(Protocol): raise NotImplementedError("Reranking is not implemented") return # this is so mypy's safe-super rule will consider the method concrete + @webmethod(route="/openai/v1/completions", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/completions", method="POST", level=LLAMA_STACK_API_V1) async def openai_completion( self, @@ -1117,6 +1118,7 @@ class InferenceProvider(Protocol): """ ... + @webmethod(route="/openai/v1/chat/completions", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/chat/completions", method="POST", level=LLAMA_STACK_API_V1) async def openai_chat_completion( self, @@ -1173,6 +1175,7 @@ class InferenceProvider(Protocol): """ ... + @webmethod(route="/openai/v1/embeddings", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/embeddings", method="POST", level=LLAMA_STACK_API_V1) async def openai_embeddings( self, @@ -1202,6 +1205,7 @@ class Inference(InferenceProvider): - Embedding models: these models generate embeddings to be used for semantic search. 
""" + @webmethod(route="/openai/v1/chat/completions", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/chat/completions", method="GET", level=LLAMA_STACK_API_V1) async def list_chat_completions( self, @@ -1220,6 +1224,9 @@ class Inference(InferenceProvider): """ raise NotImplementedError("List chat completions is not implemented") + @webmethod( + route="/openai/v1/chat/completions/{completion_id}", method="GET", level=LLAMA_STACK_API_V1, deprecated=True + ) @webmethod(route="/chat/completions/{completion_id}", method="GET", level=LLAMA_STACK_API_V1) async def get_chat_completion(self, completion_id: str) -> OpenAICompletionWithInputMessages: """Describe a chat completion by its ID. diff --git a/llama_stack/apis/models/models.py b/llama_stack/apis/models/models.py index d8860654b..210ed9246 100644 --- a/llama_stack/apis/models/models.py +++ b/llama_stack/apis/models/models.py @@ -111,6 +111,14 @@ class Models(Protocol): """ ... + @webmethod(route="/openai/v1/models", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) + async def openai_list_models(self) -> OpenAIListModelsResponse: + """List models using the OpenAI API. + + :returns: A OpenAIListModelsResponse. + """ + ... + @webmethod(route="/models/{model_id:path}", method="GET", level=LLAMA_STACK_API_V1) async def get_model( self, diff --git a/llama_stack/apis/safety/safety.py b/llama_stack/apis/safety/safety.py index bf37b496a..0fa250d90 100644 --- a/llama_stack/apis/safety/safety.py +++ b/llama_stack/apis/safety/safety.py @@ -114,6 +114,7 @@ class Safety(Protocol): """ ... + @webmethod(route="/openai/v1/moderations", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/moderations", method="POST", level=LLAMA_STACK_API_V1) async def run_moderation(self, input: str | list[str], model: str) -> ModerationObject: """Classifies if text and/or image inputs are potentially harmful. 
diff --git a/llama_stack/apis/vector_io/vector_io.py b/llama_stack/apis/vector_io/vector_io.py index e07175c49..238889099 100644 --- a/llama_stack/apis/vector_io/vector_io.py +++ b/llama_stack/apis/vector_io/vector_io.py @@ -512,6 +512,7 @@ class VectorIO(Protocol): ... # OpenAI Vector Stores API endpoints + @webmethod(route="/openai/v1/vector_stores", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/vector_stores", method="POST", level=LLAMA_STACK_API_V1) async def openai_create_vector_store( self, @@ -538,6 +539,7 @@ class VectorIO(Protocol): """ ... + @webmethod(route="/openai/v1/vector_stores", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/vector_stores", method="GET", level=LLAMA_STACK_API_V1) async def openai_list_vector_stores( self, @@ -556,6 +558,9 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}", method="GET", level=LLAMA_STACK_API_V1, deprecated=True + ) @webmethod(route="/vector_stores/{vector_store_id}", method="GET", level=LLAMA_STACK_API_V1) async def openai_retrieve_vector_store( self, @@ -568,6 +573,9 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}", method="POST", level=LLAMA_STACK_API_V1, deprecated=True + ) @webmethod( route="/vector_stores/{vector_store_id}", method="POST", @@ -590,6 +598,9 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}", method="DELETE", level=LLAMA_STACK_API_V1, deprecated=True + ) @webmethod( route="/vector_stores/{vector_store_id}", method="DELETE", @@ -606,6 +617,12 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/search", + method="POST", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/search", method="POST", @@ -638,6 +655,12 @@ class VectorIO(Protocol): """ ... 
+ @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/files", + method="POST", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/files", method="POST", @@ -660,6 +683,12 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/files", + method="GET", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/files", method="GET", @@ -686,6 +715,12 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/files/{file_id}", + method="GET", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/files/{file_id}", method="GET", @@ -704,6 +739,12 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/files/{file_id}/content", + method="GET", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/files/{file_id}/content", method="GET", @@ -722,6 +763,12 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/files/{file_id}", + method="POST", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/files/{file_id}", method="POST", @@ -742,6 +789,12 @@ class VectorIO(Protocol): """ ... 
+ @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/files/{file_id}", + method="DELETE", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/files/{file_id}", method="DELETE", @@ -765,6 +818,12 @@ class VectorIO(Protocol): method="POST", level=LLAMA_STACK_API_V1, ) + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/file_batches", + method="POST", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) async def openai_create_vector_store_file_batch( self, vector_store_id: str, @@ -787,6 +846,12 @@ class VectorIO(Protocol): method="GET", level=LLAMA_STACK_API_V1, ) + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}", + method="GET", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) async def openai_retrieve_vector_store_file_batch( self, batch_id: str, @@ -800,6 +865,12 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/files", + method="GET", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/file_batches/{batch_id}/files", method="GET", @@ -828,6 +899,12 @@ class VectorIO(Protocol): """ ... + @webmethod( + route="/openai/v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel", + method="POST", + level=LLAMA_STACK_API_V1, + deprecated=True, + ) @webmethod( route="/vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel", method="POST", From 7545e6f66915c1cc48c5429b4689dab86e9bfc5e Mon Sep 17 00:00:00 2001 From: Kelly Brown <86735520+kelbrown20@users.noreply.github.com> Date: Thu, 2 Oct 2025 10:48:38 -0400 Subject: [PATCH 05/13] docs: Update docs navbar config (#3653) ## Description Currently, the docs page has the home book opened by default. 
This PR updates the .ts so that the sidebar books are collapsed when you first open the webpage --- docs/sidebars.ts | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/sidebars.ts b/docs/sidebars.ts index 2724de05c..f2cfe3798 100644 --- a/docs/sidebars.ts +++ b/docs/sidebars.ts @@ -16,7 +16,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Getting Started', - collapsed: false, + collapsed: true, items: [ 'getting_started/quickstart', 'getting_started/detailed_tutorial', @@ -26,7 +26,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Concepts', - collapsed: false, + collapsed: true, items: [ 'concepts/index', 'concepts/architecture', @@ -48,7 +48,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Distributions', - collapsed: false, + collapsed: true, items: [ 'distributions/index', 'distributions/list_of_distributions', @@ -93,7 +93,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Providers', - collapsed: false, + collapsed: true, items: [ 'providers/index', { @@ -276,7 +276,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Building Applications', - collapsed: false, + collapsed: true, items: [ 'building_applications/index', 'building_applications/rag', @@ -293,7 +293,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Advanced APIs', - collapsed: false, + collapsed: true, items: [ 'advanced_apis/post_training', 'advanced_apis/evaluation', @@ -303,7 +303,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Deploying', - collapsed: false, + collapsed: true, items: [ 'deploying/index', 'deploying/kubernetes_deployment', @@ -313,7 +313,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'Contributing', - collapsed: false, + collapsed: true, items: [ 'contributing/index', 'contributing/new_api_provider', @@ -324,7 +324,7 @@ const sidebars: SidebarsConfig = { { type: 'category', label: 'References', 
- collapsed: false, + collapsed: true, items: [ 'references/index', 'references/llama_cli_reference/index', From ae9e1c80b543c2779f95f8463af8ca1f51b7126b Mon Sep 17 00:00:00 2001 From: Alexey Rybak <50731695+reluctantfuturist@users.noreply.github.com> Date: Thu, 2 Oct 2025 09:25:09 -0700 Subject: [PATCH 06/13] docs: API spec generation for Stainless (#3655) # What does this PR do? * Adds stainless-llama-stack-spec.yaml for Stainless client generation, which comprises stable + experimental APIs ## Test Plan * Manual generation --- docs/openapi_generator/generate.py | 14 +- docs/openapi_generator/pyopenapi/generator.py | 4 + docs/static/stainless-llama-stack-spec.html | 18085 ++++++++++++++++ docs/static/stainless-llama-stack-spec.yaml | 13412 ++++++++++++ 4 files changed, 31513 insertions(+), 2 deletions(-) create mode 100644 docs/static/stainless-llama-stack-spec.html create mode 100644 docs/static/stainless-llama-stack-spec.yaml diff --git a/docs/openapi_generator/generate.py b/docs/openapi_generator/generate.py index ea0f62b00..b489833b3 100644 --- a/docs/openapi_generator/generate.py +++ b/docs/openapi_generator/generate.py @@ -34,10 +34,17 @@ def str_presenter(dumper, data): return dumper.represent_scalar("tag:yaml.org,2002:str", data, style=style) -def generate_spec(output_dir: Path, stability_filter: str = None, main_spec: bool = False): +def generate_spec(output_dir: Path, stability_filter: str = None, main_spec: bool = False, combined_spec: bool = False): """Generate OpenAPI spec with optional stability filtering.""" - if stability_filter: + if combined_spec: + # Special case for combined stable + experimental APIs + title_suffix = " - Stable & Experimental APIs" + filename_prefix = "stainless-" + description_suffix = "\n\n**🔗 COMBINED**: This specification includes both stable production-ready APIs and experimental pre-release APIs. Use stable APIs for production deployments and experimental APIs for testing new features." 
+ # Use the special "stainless" filter to include stable + experimental APIs + stability_filter = "stainless" + elif stability_filter: title_suffix = { "stable": " - Stable APIs" if not main_spec else "", "experimental": " - Experimental APIs", @@ -125,6 +132,9 @@ def main(output_dir: str): generate_spec(output_dir, "experimental") generate_spec(output_dir, "deprecated") + print("Generating combined stable + experimental specification...") + generate_spec(output_dir, combined_spec=True) + if __name__ == "__main__": fire.Fire(main) diff --git a/docs/openapi_generator/pyopenapi/generator.py b/docs/openapi_generator/pyopenapi/generator.py index d3ad2201b..bb8fa55ab 100644 --- a/docs/openapi_generator/pyopenapi/generator.py +++ b/docs/openapi_generator/pyopenapi/generator.py @@ -948,6 +948,10 @@ class Generator: # Include only deprecated endpoints if deprecated: filtered_operations.append(op) + elif self.options.stability_filter == "stainless": + # Include both stable (v1 non-deprecated) and experimental (v1alpha, v1beta) endpoints + if (stability_level == "v1" and not deprecated) or stability_level in ["v1alpha", "v1beta"]: + filtered_operations.append(op) operations = filtered_operations print( diff --git a/docs/static/stainless-llama-stack-spec.html b/docs/static/stainless-llama-stack-spec.html new file mode 100644 index 000000000..f921d2c29 --- /dev/null +++ b/docs/static/stainless-llama-stack-spec.html @@ -0,0 +1,18085 @@ + + + + + + + OpenAPI specification + + + + + + + + + + + + + diff --git a/docs/static/stainless-llama-stack-spec.yaml b/docs/static/stainless-llama-stack-spec.yaml new file mode 100644 index 000000000..cb43b313b --- /dev/null +++ b/docs/static/stainless-llama-stack-spec.yaml @@ -0,0 +1,13412 @@ +openapi: 3.1.0 +info: + title: >- + Llama Stack Specification - Stable & Experimental APIs + version: v1 + description: >- + This is the specification of the Llama Stack that provides + a set of endpoints and their corresponding interfaces that are + 
tailored to + best leverage Llama Models. + + **🔗 COMBINED**: This specification includes both stable production-ready APIs + and experimental pre-release APIs. Use stable APIs for production deployments + and experimental APIs for testing new features. +servers: + - url: http://any-hosted-llama-stack.com +paths: + /v1/chat/completions: + get: + responses: + '200': + description: A ListOpenAIChatCompletionResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIChatCompletionResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: List all chat completions. + description: List all chat completions. + parameters: + - name: after + in: query + description: >- + The ID of the last chat completion to return. + required: false + schema: + type: string + - name: limit + in: query + description: >- + The maximum number of chat completions to return. + required: false + schema: + type: integer + - name: model + in: query + description: The model to filter by. + required: false + schema: + type: string + - name: order + in: query + description: >- + The order to sort the chat completions by: "asc" or "desc". Defaults to + "desc". + required: false + schema: + $ref: '#/components/schemas/Order' + deprecated: false + post: + responses: + '200': + description: An OpenAIChatCompletion. 
+ content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/OpenAIChatCompletion' + - $ref: '#/components/schemas/OpenAIChatCompletionChunk' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: >- + Generate an OpenAI-compatible chat completion for the given messages using + the specified model. + description: >- + Generate an OpenAI-compatible chat completion for the given messages using + the specified model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiChatCompletionRequest' + required: true + deprecated: false + /v1/chat/completions/{completion_id}: + get: + responses: + '200': + description: A OpenAICompletionWithInputMessages. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAICompletionWithInputMessages' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: Describe a chat completion by its ID. + description: Describe a chat completion by its ID. + parameters: + - name: completion_id + in: path + description: ID of the chat completion. + required: true + schema: + type: string + deprecated: false + /v1/completions: + post: + responses: + '200': + description: An OpenAICompletion. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/OpenAICompletion' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: >- + Generate an OpenAI-compatible completion for the given prompt using the specified + model. + description: >- + Generate an OpenAI-compatible completion for the given prompt using the specified + model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiCompletionRequest' + required: true + deprecated: false + /v1/embeddings: + post: + responses: + '200': + description: >- + An OpenAIEmbeddingsResponse containing the embeddings. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIEmbeddingsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: >- + Generate OpenAI-compatible embeddings for the given input using the specified + model. + description: >- + Generate OpenAI-compatible embeddings for the given input using the specified + model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiEmbeddingsRequest' + required: true + deprecated: false + /v1/files: + get: + responses: + '200': + description: >- + An ListOpenAIFileResponse containing the list of files. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIFileResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Returns a list of files that belong to the user's organization. + description: >- + Returns a list of files that belong to the user's organization. + parameters: + - name: after + in: query + description: >- + A cursor for use in pagination. `after` is an object ID that defines your + place in the list. For instance, if you make a list request and receive + 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo + in order to fetch the next page of the list. + required: false + schema: + type: string + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 10,000, and the default is 10,000. + required: false + schema: + type: integer + - name: order + in: query + description: >- + Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + required: false + schema: + $ref: '#/components/schemas/Order' + - name: purpose + in: query + description: >- + Only return files with the given purpose. + required: false + schema: + $ref: '#/components/schemas/OpenAIFilePurpose' + deprecated: false + post: + responses: + '200': + description: >- + An OpenAIFileObject representing the uploaded file. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Upload a file that can be used across various endpoints. + description: >- + Upload a file that can be used across various endpoints. + + The file upload should be a multipart form request with: + + - file: The File object (not file name) to be uploaded. + + - purpose: The intended purpose of the uploaded file. + + - expires_after: Optional form values describing expiration for the file. + parameters: [] + requestBody: + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + purpose: + $ref: '#/components/schemas/OpenAIFilePurpose' + expires_after: + $ref: '#/components/schemas/ExpiresAfter' + required: + - file + - purpose + required: true + deprecated: false + /v1/files/{file_id}: + get: + responses: + '200': + description: >- + An OpenAIFileObject containing file information. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Returns information about a specific file. + description: >- + Returns information about a specific file. + parameters: + - name: file_id + in: path + description: >- + The ID of the file to use for this request. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: >- + An OpenAIFileDeleteResponse indicating successful deletion. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIFileDeleteResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: Delete a file. + description: Delete a file. + parameters: + - name: file_id + in: path + description: >- + The ID of the file to use for this request. + required: true + schema: + type: string + deprecated: false + /v1/files/{file_id}/content: + get: + responses: + '200': + description: >- + The raw file content as a binary response. + content: + application/json: + schema: + $ref: '#/components/schemas/Response' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Files + summary: >- + Returns the contents of the specified file. + description: >- + Returns the contents of the specified file. + parameters: + - name: file_id + in: path + description: >- + The ID of the file to use for this request. + required: true + schema: + type: string + deprecated: false + /v1/health: + get: + responses: + '200': + description: >- + Health information indicating if the service is operational. + content: + application/json: + schema: + $ref: '#/components/schemas/HealthInfo' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inspect + summary: >- + Get the current health status of the service. + description: >- + Get the current health status of the service. 
+ parameters: [] + deprecated: false + /v1/inspect/routes: + get: + responses: + '200': + description: >- + Response containing information about all available routes. + content: + application/json: + schema: + $ref: '#/components/schemas/ListRoutesResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inspect + summary: >- + List all available API routes with their methods and implementing providers. + description: >- + List all available API routes with their methods and implementing providers. + parameters: [] + deprecated: false + /v1/models: + get: + responses: + '200': + description: A ListModelsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListModelsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Models + summary: List all models. + description: List all models. + parameters: [] + deprecated: false + post: + responses: + '200': + description: A Model. + content: + application/json: + schema: + $ref: '#/components/schemas/Model' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Models + summary: Register a model. + description: Register a model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RegisterModelRequest' + required: true + deprecated: false + /v1/models/{model_id}: + get: + responses: + '200': + description: A Model. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Model' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Models + summary: Get a model by its identifier. + description: Get a model by its identifier. + parameters: + - name: model_id + in: path + description: The identifier of the model to get. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Models + summary: Unregister a model. + description: Unregister a model. + parameters: + - name: model_id + in: path + description: >- + The identifier of the model to unregister. + required: true + schema: + type: string + deprecated: false + /v1/moderations: + post: + responses: + '200': + description: A moderation object. + content: + application/json: + schema: + $ref: '#/components/schemas/ModerationObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Safety + summary: >- + Classifies if text and/or image inputs are potentially harmful. + description: >- + Classifies if text and/or image inputs are potentially harmful. 
+ parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RunModerationRequest' + required: true + deprecated: false + /v1/prompts: + get: + responses: + '200': + description: >- + A ListPromptsResponse containing all prompts. + content: + application/json: + schema: + $ref: '#/components/schemas/ListPromptsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Prompts + summary: List all prompts. + description: List all prompts. + parameters: [] + deprecated: false + post: + responses: + '200': + description: The created Prompt resource. + content: + application/json: + schema: + $ref: '#/components/schemas/Prompt' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Prompts + summary: Create a new prompt. + description: Create a new prompt. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreatePromptRequest' + required: true + deprecated: false + /v1/prompts/{prompt_id}: + get: + responses: + '200': + description: A Prompt resource. + content: + application/json: + schema: + $ref: '#/components/schemas/Prompt' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Prompts + summary: >- + Get a prompt by its identifier and optional version. + description: >- + Get a prompt by its identifier and optional version. 
+ parameters: + - name: prompt_id + in: path + description: The identifier of the prompt to get. + required: true + schema: + type: string + - name: version + in: query + description: >- + The version of the prompt to get (defaults to latest). + required: false + schema: + type: integer + deprecated: false + post: + responses: + '200': + description: >- + The updated Prompt resource with incremented version. + content: + application/json: + schema: + $ref: '#/components/schemas/Prompt' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Prompts + summary: >- + Update an existing prompt (increments version). + description: >- + Update an existing prompt (increments version). + parameters: + - name: prompt_id + in: path + description: The identifier of the prompt to update. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/UpdatePromptRequest' + required: true + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Prompts + summary: Delete a prompt. + description: Delete a prompt. + parameters: + - name: prompt_id + in: path + description: The identifier of the prompt to delete. + required: true + schema: + type: string + deprecated: false + /v1/prompts/{prompt_id}/set-default-version: + post: + responses: + '200': + description: >- + The prompt with the specified version now set as default. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Prompt' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Prompts + summary: >- + Set which version of a prompt should be the default in get_prompt (latest). + description: >- + Set which version of a prompt should be the default in get_prompt (latest). + parameters: + - name: prompt_id + in: path + description: The identifier of the prompt. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/SetDefaultVersionRequest' + required: true + deprecated: false + /v1/prompts/{prompt_id}/versions: + get: + responses: + '200': + description: >- + A ListPromptsResponse containing all versions of the prompt. + content: + application/json: + schema: + $ref: '#/components/schemas/ListPromptsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Prompts + summary: List all versions of a specific prompt. + description: List all versions of a specific prompt. + parameters: + - name: prompt_id + in: path + description: >- + The identifier of the prompt to list versions for. + required: true + schema: + type: string + deprecated: false + /v1/providers: + get: + responses: + '200': + description: >- + A ListProvidersResponse containing information about all providers. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/ListProvidersResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Providers + summary: List all available providers. + description: List all available providers. + parameters: [] + deprecated: false + /v1/providers/{provider_id}: + get: + responses: + '200': + description: >- + A ProviderInfo object containing the provider's details. + content: + application/json: + schema: + $ref: '#/components/schemas/ProviderInfo' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Providers + summary: >- + Get detailed information about a specific provider. + description: >- + Get detailed information about a specific provider. + parameters: + - name: provider_id + in: path + description: The ID of the provider to inspect. + required: true + schema: + type: string + deprecated: false + /v1/responses: + get: + responses: + '200': + description: A ListOpenAIResponseObject. + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: List all OpenAI responses. + description: List all OpenAI responses. + parameters: + - name: after + in: query + description: The ID of the last response to return. 
+ required: false + schema: + type: string + - name: limit + in: query + description: The number of responses to return. + required: false + schema: + type: integer + - name: model + in: query + description: The model to filter responses by. + required: false + schema: + type: string + - name: order + in: query + description: >- + The order to sort responses by when sorted by created_at ('asc' or 'desc'). + required: false + schema: + $ref: '#/components/schemas/Order' + deprecated: false + post: + responses: + '200': + description: A ListOpenAIResponseObject. + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenAIResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: List all OpenAI responses. + description: List all OpenAI responses. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ListOpenaiResponsesRequest' + required: true + deprecated: false + /v1/responses/{response_id}: + get: + responses: + '200': + description: An OpenAIResponseObject. + content: + application/json: + schema: + $ref: '#/components/schemas/OpenAIResponseObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Retrieve an OpenAI response by its ID. + description: Retrieve an OpenAI response by its ID. + parameters: + - name: response_id + in: path + description: >- + The ID of the OpenAI response to retrieve. 
+          required: true
+          schema:
+            type: string
+      deprecated: false
+    delete:
+      responses:
+        '200':
+          description: An OpenAIDeleteResponseObject.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/OpenAIDeleteResponseObject'
+        '400':
+          $ref: '#/components/responses/BadRequest400'
+        '429':
+          $ref: >-
+            #/components/responses/TooManyRequests429
+        '500':
+          $ref: >-
+            #/components/responses/InternalServerError500
+        default:
+          $ref: '#/components/responses/DefaultError'
+      tags:
+        - Agents
+      summary: Delete an OpenAI response by its ID.
+      description: Delete an OpenAI response by its ID.
+      parameters:
+        - name: response_id
+          in: path
+          description: The ID of the OpenAI response to delete.
+          required: true
+          schema:
+            type: string
+      deprecated: false
+  /v1/responses/{response_id}/input_items:
+    get:
+      responses:
+        '200':
+          description: A ListOpenAIResponseInputItem.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/ListOpenAIResponseInputItem'
+        '400':
+          $ref: '#/components/responses/BadRequest400'
+        '429':
+          $ref: >-
+            #/components/responses/TooManyRequests429
+        '500':
+          $ref: >-
+            #/components/responses/InternalServerError500
+        default:
+          $ref: '#/components/responses/DefaultError'
+      tags:
+        - Agents
+      summary: >-
+        List input items for a given OpenAI response.
+      description: >-
+        List input items for a given OpenAI response.
+      parameters:
+        - name: response_id
+          in: path
+          description: >-
+            The ID of the response to retrieve input items for.
+          required: true
+          schema:
+            type: string
+        - name: after
+          in: query
+          description: >-
+            An item ID to list items after, used for pagination.
+          required: false
+          schema:
+            type: string
+        - name: before
+          in: query
+          description: >-
+            An item ID to list items before, used for pagination.
+          required: false
+          schema:
+            type: string
+        - name: include
+          in: query
+          description: >-
+            Additional fields to include in the response.
+ required: false + schema: + type: array + items: + type: string + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + The order to return the input items in. Default is desc. + required: false + schema: + $ref: '#/components/schemas/Order' + deprecated: false + /v1/safety/run-shield: + post: + responses: + '200': + description: A RunShieldResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/RunShieldResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Safety + summary: Run a shield. + description: Run a shield. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RunShieldRequest' + required: true + deprecated: false + /v1/scoring-functions: + get: + responses: + '200': + description: A ListScoringFunctionsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListScoringFunctionsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ScoringFunctions + summary: List all scoring functions. + description: List all scoring functions. 
+ parameters: [] + deprecated: false + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ScoringFunctions + summary: Register a scoring function. + description: Register a scoring function. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RegisterScoringFunctionRequest' + required: true + deprecated: false + /v1/scoring-functions/{scoring_fn_id}: + get: + responses: + '200': + description: A ScoringFn. + content: + application/json: + schema: + $ref: '#/components/schemas/ScoringFn' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ScoringFunctions + summary: Get a scoring function by its ID. + description: Get a scoring function by its ID. + parameters: + - name: scoring_fn_id + in: path + description: The ID of the scoring function to get. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ScoringFunctions + summary: Unregister a scoring function. + description: Unregister a scoring function. + parameters: + - name: scoring_fn_id + in: path + description: >- + The ID of the scoring function to unregister. 
+ required: true + schema: + type: string + deprecated: false + /v1/scoring/score: + post: + responses: + '200': + description: >- + A ScoreResponse object containing rows and aggregated results. + content: + application/json: + schema: + $ref: '#/components/schemas/ScoreResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Scoring + summary: Score a list of rows. + description: Score a list of rows. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ScoreRequest' + required: true + deprecated: false + /v1/scoring/score-batch: + post: + responses: + '200': + description: A ScoreBatchResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ScoreBatchResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Scoring + summary: Score a batch of rows. + description: Score a batch of rows. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ScoreBatchRequest' + required: true + deprecated: false + /v1/shields: + get: + responses: + '200': + description: A ListShieldsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListShieldsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Shields + summary: List all shields. + description: List all shields. 
+ parameters: [] + deprecated: false + post: + responses: + '200': + description: A Shield. + content: + application/json: + schema: + $ref: '#/components/schemas/Shield' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Shields + summary: Register a shield. + description: Register a shield. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RegisterShieldRequest' + required: true + deprecated: false + /v1/shields/{identifier}: + get: + responses: + '200': + description: A Shield. + content: + application/json: + schema: + $ref: '#/components/schemas/Shield' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Shields + summary: Get a shield by its identifier. + description: Get a shield by its identifier. + parameters: + - name: identifier + in: path + description: The identifier of the shield to get. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Shields + summary: Unregister a shield. + description: Unregister a shield. + parameters: + - name: identifier + in: path + description: >- + The identifier of the shield to unregister. 
+ required: true + schema: + type: string + deprecated: false + /v1/synthetic-data-generation/generate: + post: + responses: + '200': + description: >- + Response containing filtered synthetic data samples and optional statistics + content: + application/json: + schema: + $ref: '#/components/schemas/SyntheticDataGenerationResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - SyntheticDataGeneration (Coming Soon) + summary: >- + Generate synthetic data based on input dialogs and apply filtering. + description: >- + Generate synthetic data based on input dialogs and apply filtering. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/SyntheticDataGenerateRequest' + required: true + deprecated: false + /v1/telemetry/events: + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Log an event. + description: Log an event. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/LogEventRequest' + required: true + deprecated: false + /v1/tool-runtime/invoke: + post: + responses: + '200': + description: A ToolInvocationResult. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/ToolInvocationResult' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolRuntime + summary: Run a tool with the given arguments. + description: Run a tool with the given arguments. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/InvokeToolRequest' + required: true + deprecated: false + /v1/tool-runtime/list-tools: + get: + responses: + '200': + description: A ListToolDefsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListToolDefsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolRuntime + summary: List all tools in the runtime. + description: List all tools in the runtime. + parameters: + - name: tool_group_id + in: query + description: >- + The ID of the tool group to list tools for. + required: false + schema: + type: string + - name: mcp_endpoint + in: query + description: >- + The MCP endpoint to use for the tool group. + required: false + schema: + $ref: '#/components/schemas/URL' + deprecated: false + /v1/tool-runtime/rag-tool/insert: + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolRuntime + summary: >- + Index documents so they can be used by the RAG system. 
+ description: >- + Index documents so they can be used by the RAG system. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/InsertRequest' + required: true + deprecated: false + /v1/tool-runtime/rag-tool/query: + post: + responses: + '200': + description: >- + RAGQueryResult containing the retrieved content and metadata + content: + application/json: + schema: + $ref: '#/components/schemas/RAGQueryResult' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolRuntime + summary: >- + Query the RAG system for context; typically invoked by the agent. + description: >- + Query the RAG system for context; typically invoked by the agent. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRequest' + required: true + deprecated: false + /v1/toolgroups: + get: + responses: + '200': + description: A ListToolGroupsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListToolGroupsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolGroups + summary: List tool groups with optional provider. + description: List tool groups with optional provider. + parameters: [] + deprecated: false + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolGroups + summary: Register a tool group. 
+ description: Register a tool group. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RegisterToolGroupRequest' + required: true + deprecated: false + /v1/toolgroups/{toolgroup_id}: + get: + responses: + '200': + description: A ToolGroup. + content: + application/json: + schema: + $ref: '#/components/schemas/ToolGroup' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolGroups + summary: Get a tool group by its ID. + description: Get a tool group by its ID. + parameters: + - name: toolgroup_id + in: path + description: The ID of the tool group to get. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolGroups + summary: Unregister a tool group. + description: Unregister a tool group. + parameters: + - name: toolgroup_id + in: path + description: The ID of the tool group to unregister. + required: true + schema: + type: string + deprecated: false + /v1/tools: + get: + responses: + '200': + description: A ListToolsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListToolsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolGroups + summary: List tools with optional tool group. + description: List tools with optional tool group. 
+ parameters: + - name: toolgroup_id + in: query + description: >- + The ID of the tool group to list tools for. + required: false + schema: + type: string + deprecated: false + /v1/tools/{tool_name}: + get: + responses: + '200': + description: A Tool. + content: + application/json: + schema: + $ref: '#/components/schemas/Tool' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - ToolGroups + summary: Get a tool by its name. + description: Get a tool by its name. + parameters: + - name: tool_name + in: path + description: The name of the tool to get. + required: true + schema: + type: string + deprecated: false + /v1/vector-dbs: + get: + responses: + '200': + description: A ListVectorDBsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListVectorDBsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorDBs + summary: List all vector databases. + description: List all vector databases. + parameters: [] + deprecated: false + post: + responses: + '200': + description: A VectorDB. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorDB' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorDBs + summary: Register a vector database. + description: Register a vector database. 
+ parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RegisterVectorDbRequest' + required: true + deprecated: false + /v1/vector-dbs/{vector_db_id}: + get: + responses: + '200': + description: A VectorDB. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorDB' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorDBs + summary: Get a vector database by its identifier. + description: Get a vector database by its identifier. + parameters: + - name: vector_db_id + in: path + description: >- + The identifier of the vector database to get. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorDBs + summary: Unregister a vector database. + description: Unregister a vector database. + parameters: + - name: vector_db_id + in: path + description: >- + The identifier of the vector database to unregister. + required: true + schema: + type: string + deprecated: false + /v1/vector-io/insert: + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Insert chunks into a vector database. + description: Insert chunks into a vector database. 
+ parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/InsertChunksRequest' + required: true + deprecated: false + /v1/vector-io/query: + post: + responses: + '200': + description: A QueryChunksResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryChunksResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Query chunks from a vector database. + description: Query chunks from a vector database. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryChunksRequest' + required: true + deprecated: false + /v1/vector_stores: + get: + responses: + '200': + description: >- + A VectorStoreListResponse containing the list of vector stores. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreListResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Returns a list of vector stores. + description: Returns a list of vector stores. + parameters: + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + required: false + schema: + type: string + - name: after + in: query + description: >- + A cursor for use in pagination. 
`after` is an object ID that defines your + place in the list. + required: false + schema: + type: string + - name: before + in: query + description: >- + A cursor for use in pagination. `before` is an object ID that defines + your place in the list. + required: false + schema: + type: string + deprecated: false + post: + responses: + '200': + description: >- + A VectorStoreObject representing the created vector store. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Creates a vector store. + description: Creates a vector store. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiCreateVectorStoreRequest' + required: true + deprecated: false + /v1/vector_stores/{vector_store_id}: + get: + responses: + '200': + description: >- + A VectorStoreObject representing the vector store. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Retrieves a vector store. + description: Retrieves a vector store. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to retrieve. + required: true + schema: + type: string + deprecated: false + post: + responses: + '200': + description: >- + A VectorStoreObject representing the updated vector store. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Updates a vector store. + description: Updates a vector store. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to update. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiUpdateVectorStoreRequest' + required: true + deprecated: false + delete: + responses: + '200': + description: >- + A VectorStoreDeleteResponse indicating the deletion status. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreDeleteResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Delete a vector store. + description: Delete a vector store. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to delete. + required: true + schema: + type: string + deprecated: false + /v1/vector_stores/{vector_store_id}/file_batches: + post: + responses: + '200': + description: >- + A VectorStoreFileBatchObject representing the created file batch. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileBatchObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Create a vector store file batch. + description: Create a vector store file batch. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store to create the file batch for. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiCreateVectorStoreFileBatchRequest' + required: true + deprecated: false + /v1/vector_stores/{vector_store_id}/file_batches/{batch_id}: + get: + responses: + '200': + description: >- + A VectorStoreFileBatchObject representing the file batch. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileBatchObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Retrieve a vector store file batch. + description: Retrieve a vector store file batch. + parameters: + - name: batch_id + in: path + description: The ID of the file batch to retrieve. + required: true + schema: + type: string + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file batch. + required: true + schema: + type: string + deprecated: false + /v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/cancel: + post: + responses: + '200': + description: >- + A VectorStoreFileBatchObject representing the cancelled file batch. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileBatchObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Cancels a vector store file batch. + description: Cancels a vector store file batch. + parameters: + - name: batch_id + in: path + description: The ID of the file batch to cancel. + required: true + schema: + type: string + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file batch. + required: true + schema: + type: string + deprecated: false + /v1/vector_stores/{vector_store_id}/file_batches/{batch_id}/files: + get: + responses: + '200': + description: >- + A VectorStoreFilesListInBatchResponse containing the list of files in + the batch. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFilesListInBatchResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: >- + Returns a list of vector store files in a batch. + description: >- + Returns a list of vector store files in a batch. + parameters: + - name: batch_id + in: path + description: >- + The ID of the file batch to list files from. + required: true + schema: + type: string + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file batch. + required: true + schema: + type: string + - name: after + in: query + description: >- + A cursor for use in pagination. `after` is an object ID that defines your + place in the list. 
+ required: false + schema: + type: string + - name: before + in: query + description: >- + A cursor for use in pagination. `before` is an object ID that defines + your place in the list. + required: false + schema: + type: string + - name: filter + in: query + description: >- + Filter by file status. One of in_progress, completed, failed, cancelled. + required: false + schema: + type: string + - name: limit + in: query + description: >- + A limit on the number of objects to be returned. Limit can range between + 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + Sort order by the `created_at` timestamp of the objects. `asc` for ascending + order and `desc` for descending order. + required: false + schema: + type: string + deprecated: false + /v1/vector_stores/{vector_store_id}/files: + get: + responses: + '200': + description: >- + A VectorStoreListFilesResponse containing the list of files. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreListFilesResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: List files in a vector store. + description: List files in a vector store. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store to list files from. + required: true + schema: + type: string + - name: limit + in: query + description: >- + (Optional) A limit on the number of objects to be returned. Limit can + range between 1 and 100, and the default is 20. + required: false + schema: + type: integer + - name: order + in: query + description: >- + (Optional) Sort order by the `created_at` timestamp of the objects. `asc` + for ascending order and `desc` for descending order. 
+ required: false + schema: + type: string + - name: after + in: query + description: >- + (Optional) A cursor for use in pagination. `after` is an object ID that + defines your place in the list. + required: false + schema: + type: string + - name: before + in: query + description: >- + (Optional) A cursor for use in pagination. `before` is an object ID that + defines your place in the list. + required: false + schema: + type: string + - name: filter + in: query + description: >- + (Optional) Filter by file status to only return files with the specified + status. + required: false + schema: + $ref: '#/components/schemas/VectorStoreFileStatus' + deprecated: false + post: + responses: + '200': + description: >- + A VectorStoreFileObject representing the attached file. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Attach a file to a vector store. + description: Attach a file to a vector store. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store to attach the file to. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiAttachFileToVectorStoreRequest' + required: true + deprecated: false + /v1/vector_stores/{vector_store_id}/files/{file_id}: + get: + responses: + '200': + description: >- + A VectorStoreFileObject representing the file. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Retrieves a vector store file. + description: Retrieves a vector store file. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file to retrieve. + required: true + schema: + type: string + - name: file_id + in: path + description: The ID of the file to retrieve. + required: true + schema: + type: string + deprecated: false + post: + responses: + '200': + description: >- + A VectorStoreFileObject representing the updated file. + content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreFileObject' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Updates a vector store file. + description: Updates a vector store file. + parameters: + - name: vector_store_id + in: path + description: >- + The ID of the vector store containing the file to update. + required: true + schema: + type: string + - name: file_id + in: path + description: The ID of the file to update. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiUpdateVectorStoreFileRequest' + required: true + deprecated: false + delete: + responses: + '200': + description: >- + A VectorStoreFileDeleteResponse indicating the deletion status. 
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/VectorStoreFileDeleteResponse'
+        '400':
+          $ref: '#/components/responses/BadRequest400'
+        '429':
+          $ref: >-
+            #/components/responses/TooManyRequests429
+        '500':
+          $ref: >-
+            #/components/responses/InternalServerError500
+        default:
+          $ref: '#/components/responses/DefaultError'
+      tags:
+        - VectorIO
+      summary: Delete a vector store file.
+      description: Delete a vector store file.
+      parameters:
+        - name: vector_store_id
+          in: path
+          description: >-
+            The ID of the vector store containing the file to delete.
+          required: true
+          schema:
+            type: string
+        - name: file_id
+          in: path
+          description: The ID of the file to delete.
+          required: true
+          schema:
+            type: string
+      deprecated: false
+  /v1/vector_stores/{vector_store_id}/files/{file_id}/content:
+    get:
+      responses:
+        '200':
+          description: >-
+            A VectorStoreFileContentsResponse containing the file contents.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/VectorStoreFileContentsResponse'
+        '400':
+          $ref: '#/components/responses/BadRequest400'
+        '429':
+          $ref: >-
+            #/components/responses/TooManyRequests429
+        '500':
+          $ref: >-
+            #/components/responses/InternalServerError500
+        default:
+          $ref: '#/components/responses/DefaultError'
+      tags:
+        - VectorIO
+      summary: >-
+        Retrieves the contents of a vector store file.
+      description: >-
+        Retrieves the contents of a vector store file.
+      parameters:
+        - name: vector_store_id
+          in: path
+          description: >-
+            The ID of the vector store containing the file to retrieve.
+          required: true
+          schema:
+            type: string
+        - name: file_id
+          in: path
+          description: The ID of the file to retrieve.
+          required: true
+          schema:
+            type: string
+      deprecated: false
+  /v1/vector_stores/{vector_store_id}/search:
+    post:
+      responses:
+        '200':
+          description: >-
+            A VectorStoreSearchResponsePage containing the search results.
+ content: + application/json: + schema: + $ref: '#/components/schemas/VectorStoreSearchResponsePage' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - VectorIO + summary: Search for chunks in a vector store. + description: >- + Search for chunks in a vector store. + + Searches a vector store for relevant chunks based on a query and optional + file attribute filters. + parameters: + - name: vector_store_id + in: path + description: The ID of the vector store to search. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OpenaiSearchVectorStoreRequest' + required: true + deprecated: false + /v1/version: + get: + responses: + '200': + description: >- + Version information containing the service version number. + content: + application/json: + schema: + $ref: '#/components/schemas/VersionInfo' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inspect + summary: Get the version of the service. + description: Get the version of the service. + parameters: [] + deprecated: false + /v1beta/datasetio/append-rows/{dataset_id}: + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - DatasetIO + summary: Append rows to a dataset. + description: Append rows to a dataset. 
+ parameters: + - name: dataset_id + in: path + description: >- + The ID of the dataset to append the rows to. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/AppendRowsRequest' + required: true + deprecated: false + /v1beta/datasetio/iterrows/{dataset_id}: + get: + responses: + '200': + description: A PaginatedResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/PaginatedResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - DatasetIO + summary: >- + Get a paginated list of rows from a dataset. + description: >- + Get a paginated list of rows from a dataset. + + Uses offset-based pagination where: + + - start_index: The starting index (0-based). If None, starts from beginning. + + - limit: Number of items to return. If None or -1, returns all items. + + + The response includes: + + - data: List of items for the current page. + + - has_more: Whether there are more items available after this set. + parameters: + - name: dataset_id + in: path + description: >- + The ID of the dataset to get the rows from. + required: true + schema: + type: string + - name: start_index + in: query + description: >- + Index into dataset for the first row to get. Get all rows if None. + required: false + schema: + type: integer + - name: limit + in: query + description: The number of rows to get. + required: false + schema: + type: integer + deprecated: false + /v1beta/datasets: + get: + responses: + '200': + description: A ListDatasetsResponse. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/ListDatasetsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Datasets + summary: List all datasets. + description: List all datasets. + parameters: [] + deprecated: false + post: + responses: + '200': + description: A Dataset. + content: + application/json: + schema: + $ref: '#/components/schemas/Dataset' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Datasets + summary: Register a new dataset. + description: Register a new dataset. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RegisterDatasetRequest' + required: true + deprecated: false + /v1beta/datasets/{dataset_id}: + get: + responses: + '200': + description: A Dataset. + content: + application/json: + schema: + $ref: '#/components/schemas/Dataset' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Datasets + summary: Get a dataset by its ID. + description: Get a dataset by its ID. + parameters: + - name: dataset_id + in: path + description: The ID of the dataset to get. 
+ required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Datasets + summary: Unregister a dataset by its ID. + description: Unregister a dataset by its ID. + parameters: + - name: dataset_id + in: path + description: The ID of the dataset to unregister. + required: true + schema: + type: string + deprecated: false + /v1alpha/agents: + get: + responses: + '200': + description: A PaginatedResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/PaginatedResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: List all agents. + description: List all agents. + parameters: + - name: start_index + in: query + description: The index to start the pagination from. + required: false + schema: + type: integer + - name: limit + in: query + description: The number of agents to return. + required: false + schema: + type: integer + deprecated: false + post: + responses: + '200': + description: >- + An AgentCreateResponse with the agent ID. + content: + application/json: + schema: + $ref: '#/components/schemas/AgentCreateResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: >- + Create an agent with the given configuration. + description: >- + Create an agent with the given configuration. 
+ parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateAgentRequest' + required: true + deprecated: false + /v1alpha/agents/{agent_id}: + get: + responses: + '200': + description: An Agent of the agent. + content: + application/json: + schema: + $ref: '#/components/schemas/Agent' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Describe an agent by its ID. + description: Describe an agent by its ID. + parameters: + - name: agent_id + in: path + description: ID of the agent. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: >- + Delete an agent by its ID and its associated sessions and turns. + description: >- + Delete an agent by its ID and its associated sessions and turns. + parameters: + - name: agent_id + in: path + description: The ID of the agent to delete. + required: true + schema: + type: string + deprecated: false + /v1alpha/agents/{agent_id}/session: + post: + responses: + '200': + description: An AgentSessionCreateResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/AgentSessionCreateResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Create a new session for an agent. 
+ description: Create a new session for an agent. + parameters: + - name: agent_id + in: path + description: >- + The ID of the agent to create the session for. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateAgentSessionRequest' + required: true + deprecated: false + /v1alpha/agents/{agent_id}/session/{session_id}: + get: + responses: + '200': + description: A Session. + content: + application/json: + schema: + $ref: '#/components/schemas/Session' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Retrieve an agent session by its ID. + description: Retrieve an agent session by its ID. + parameters: + - name: session_id + in: path + description: The ID of the session to get. + required: true + schema: + type: string + - name: agent_id + in: path + description: >- + The ID of the agent to get the session for. + required: true + schema: + type: string + - name: turn_ids + in: query + description: >- + (Optional) List of turn IDs to filter the session by. + required: false + schema: + type: array + items: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: >- + Delete an agent session by its ID and its associated turns. + description: >- + Delete an agent session by its ID and its associated turns. + parameters: + - name: session_id + in: path + description: The ID of the session to delete. 
+ required: true + schema: + type: string + - name: agent_id + in: path + description: >- + The ID of the agent to delete the session for. + required: true + schema: + type: string + deprecated: false + /v1alpha/agents/{agent_id}/session/{session_id}/turn: + post: + responses: + '200': + description: >- + If stream=False, returns a Turn object. If stream=True, returns an SSE + event stream of AgentTurnResponseStreamChunk. + content: + application/json: + schema: + $ref: '#/components/schemas/Turn' + text/event-stream: + schema: + $ref: '#/components/schemas/AgentTurnResponseStreamChunk' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Create a new turn for an agent. + description: Create a new turn for an agent. + parameters: + - name: agent_id + in: path + description: >- + The ID of the agent to create the turn for. + required: true + schema: + type: string + - name: session_id + in: path + description: >- + The ID of the session to create the turn for. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CreateAgentTurnRequest' + required: true + deprecated: false + /v1alpha/agents/{agent_id}/session/{session_id}/turn/{turn_id}: + get: + responses: + '200': + description: A Turn. + content: + application/json: + schema: + $ref: '#/components/schemas/Turn' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Retrieve an agent turn by its ID. + description: Retrieve an agent turn by its ID. 
+ parameters: + - name: agent_id + in: path + description: The ID of the agent to get the turn for. + required: true + schema: + type: string + - name: session_id + in: path + description: >- + The ID of the session to get the turn for. + required: true + schema: + type: string + - name: turn_id + in: path + description: The ID of the turn to get. + required: true + schema: + type: string + deprecated: false + /v1alpha/agents/{agent_id}/session/{session_id}/turn/{turn_id}/resume: + post: + responses: + '200': + description: >- + A Turn object if stream is False, otherwise an AsyncIterator of AgentTurnResponseStreamChunk + objects. + content: + application/json: + schema: + $ref: '#/components/schemas/Turn' + text/event-stream: + schema: + $ref: '#/components/schemas/AgentTurnResponseStreamChunk' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: >- + Resume an agent turn with executed tool call responses. + description: >- + Resume an agent turn with executed tool call responses. + + When a Turn has the status `awaiting_input` due to pending input from client + side tool calls, this endpoint can be used to submit the outputs from the + tool calls once they are ready. + parameters: + - name: agent_id + in: path + description: The ID of the agent to resume. + required: true + schema: + type: string + - name: session_id + in: path + description: The ID of the session to resume. + required: true + schema: + type: string + - name: turn_id + in: path + description: The ID of the turn to resume. 
+ required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ResumeAgentTurnRequest' + required: true + deprecated: false + /v1alpha/agents/{agent_id}/session/{session_id}/turn/{turn_id}/step/{step_id}: + get: + responses: + '200': + description: An AgentStepResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/AgentStepResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: Retrieve an agent step by its ID. + description: Retrieve an agent step by its ID. + parameters: + - name: agent_id + in: path + description: The ID of the agent to get the step for. + required: true + schema: + type: string + - name: session_id + in: path + description: >- + The ID of the session to get the step for. + required: true + schema: + type: string + - name: turn_id + in: path + description: The ID of the turn to get the step for. + required: true + schema: + type: string + - name: step_id + in: path + description: The ID of the step to get. + required: true + schema: + type: string + deprecated: false + /v1alpha/agents/{agent_id}/sessions: + get: + responses: + '200': + description: A PaginatedResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/PaginatedResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Agents + summary: List all session(s) of a given agent. + description: List all session(s) of a given agent. + parameters: + - name: agent_id + in: path + description: >- + The ID of the agent to list sessions for. 
+ required: true + schema: + type: string + - name: start_index + in: query + description: The index to start the pagination from. + required: false + schema: + type: integer + - name: limit + in: query + description: The number of sessions to return. + required: false + schema: + type: integer + deprecated: false + /v1alpha/eval/benchmarks: + get: + responses: + '200': + description: A ListBenchmarksResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListBenchmarksResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Benchmarks + summary: List all benchmarks. + description: List all benchmarks. + parameters: [] + deprecated: false + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Benchmarks + summary: Register a benchmark. + description: Register a benchmark. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RegisterBenchmarkRequest' + required: true + deprecated: false + /v1alpha/eval/benchmarks/{benchmark_id}: + get: + responses: + '200': + description: A Benchmark. + content: + application/json: + schema: + $ref: '#/components/schemas/Benchmark' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Benchmarks + summary: Get a benchmark by its ID. + description: Get a benchmark by its ID. 
+ parameters: + - name: benchmark_id + in: path + description: The ID of the benchmark to get. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Benchmarks + summary: Unregister a benchmark. + description: Unregister a benchmark. + parameters: + - name: benchmark_id + in: path + description: The ID of the benchmark to unregister. + required: true + schema: + type: string + deprecated: false + /v1alpha/eval/benchmarks/{benchmark_id}/evaluations: + post: + responses: + '200': + description: >- + EvaluateResponse object containing generations and scores. + content: + application/json: + schema: + $ref: '#/components/schemas/EvaluateResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Eval + summary: Evaluate a list of rows on a benchmark. + description: Evaluate a list of rows on a benchmark. + parameters: + - name: benchmark_id + in: path + description: >- + The ID of the benchmark to run the evaluation on. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/EvaluateRowsRequest' + required: true + deprecated: false + /v1alpha/eval/benchmarks/{benchmark_id}/jobs: + post: + responses: + '200': + description: >- + The job that was created to run the evaluation. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/Job' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Eval + summary: Run an evaluation on a benchmark. + description: Run an evaluation on a benchmark. + parameters: + - name: benchmark_id + in: path + description: >- + The ID of the benchmark to run the evaluation on. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RunEvalRequest' + required: true + deprecated: false + /v1alpha/eval/benchmarks/{benchmark_id}/jobs/{job_id}: + get: + responses: + '200': + description: The status of the evaluation job. + content: + application/json: + schema: + $ref: '#/components/schemas/Job' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Eval + summary: Get the status of a job. + description: Get the status of a job. + parameters: + - name: benchmark_id + in: path + description: >- + The ID of the benchmark to run the evaluation on. + required: true + schema: + type: string + - name: job_id + in: path + description: The ID of the job to get the status of. + required: true + schema: + type: string + deprecated: false + delete: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Eval + summary: Cancel a job. + description: Cancel a job. 
+ parameters: + - name: benchmark_id + in: path + description: >- + The ID of the benchmark to run the evaluation on. + required: true + schema: + type: string + - name: job_id + in: path + description: The ID of the job to cancel. + required: true + schema: + type: string + deprecated: false + /v1alpha/eval/benchmarks/{benchmark_id}/jobs/{job_id}/result: + get: + responses: + '200': + description: The result of the job. + content: + application/json: + schema: + $ref: '#/components/schemas/EvaluateResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Eval + summary: Get the result of a job. + description: Get the result of a job. + parameters: + - name: benchmark_id + in: path + description: >- + The ID of the benchmark to run the evaluation on. + required: true + schema: + type: string + - name: job_id + in: path + description: The ID of the job to get the result of. + required: true + schema: + type: string + deprecated: false + /v1alpha/inference/rerank: + post: + responses: + '200': + description: >- + RerankResponse with indices sorted by relevance score (descending). + content: + application/json: + schema: + $ref: '#/components/schemas/RerankResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Inference + summary: >- + Rerank a list of documents based on their relevance to a query. + description: >- + Rerank a list of documents based on their relevance to a query. 
+ parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RerankRequest' + required: true + deprecated: false + /v1alpha/post-training/job/artifacts: + get: + responses: + '200': + description: A PostTrainingJobArtifactsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/PostTrainingJobArtifactsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - PostTraining (Coming Soon) + summary: Get the artifacts of a training job. + description: Get the artifacts of a training job. + parameters: + - name: job_uuid + in: query + description: >- + The UUID of the job to get the artifacts of. + required: true + schema: + type: string + deprecated: false + /v1alpha/post-training/job/cancel: + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - PostTraining (Coming Soon) + summary: Cancel a training job. + description: Cancel a training job. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CancelTrainingJobRequest' + required: true + deprecated: false + /v1alpha/post-training/job/status: + get: + responses: + '200': + description: A PostTrainingJobStatusResponse. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/PostTrainingJobStatusResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - PostTraining (Coming Soon) + summary: Get the status of a training job. + description: Get the status of a training job. + parameters: + - name: job_uuid + in: query + description: >- + The UUID of the job to get the status of. + required: true + schema: + type: string + deprecated: false + /v1alpha/post-training/jobs: + get: + responses: + '200': + description: A ListPostTrainingJobsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/ListPostTrainingJobsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - PostTraining (Coming Soon) + summary: Get all training jobs. + description: Get all training jobs. + parameters: [] + deprecated: false + /v1alpha/post-training/preference-optimize: + post: + responses: + '200': + description: A PostTrainingJob. + content: + application/json: + schema: + $ref: '#/components/schemas/PostTrainingJob' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - PostTraining (Coming Soon) + summary: Run preference optimization of a model. + description: Run preference optimization of a model. 
+ parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/PreferenceOptimizeRequest' + required: true + deprecated: false + /v1alpha/post-training/supervised-fine-tune: + post: + responses: + '200': + description: A PostTrainingJob. + content: + application/json: + schema: + $ref: '#/components/schemas/PostTrainingJob' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - PostTraining (Coming Soon) + summary: Run supervised fine-tuning of a model. + description: Run supervised fine-tuning of a model. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/SupervisedFineTuneRequest' + required: true + deprecated: false + /v1alpha/telemetry/metrics/{metric_name}: + post: + responses: + '200': + description: A QueryMetricsResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryMetricsResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Query metrics. + description: Query metrics. + parameters: + - name: metric_name + in: path + description: The name of the metric to query. + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryMetricsRequest' + required: true + deprecated: false + /v1alpha/telemetry/spans: + post: + responses: + '200': + description: A QuerySpansResponse. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/QuerySpansResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Query spans. + description: Query spans. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QuerySpansRequest' + required: true + deprecated: false + /v1alpha/telemetry/spans/export: + post: + responses: + '200': + description: OK + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Save spans to a dataset. + description: Save spans to a dataset. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/SaveSpansToDatasetRequest' + required: true + deprecated: false + /v1alpha/telemetry/spans/{span_id}/tree: + post: + responses: + '200': + description: A QuerySpanTreeResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/QuerySpanTreeResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Get a span tree by its ID. + description: Get a span tree by its ID. + parameters: + - name: span_id + in: path + description: The ID of the span to get the tree from. 
+ required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/GetSpanTreeRequest' + required: true + deprecated: false + /v1alpha/telemetry/traces: + post: + responses: + '200': + description: A QueryTracesResponse. + content: + application/json: + schema: + $ref: '#/components/schemas/QueryTracesResponse' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Query traces. + description: Query traces. + parameters: [] + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryTracesRequest' + required: true + deprecated: false + /v1alpha/telemetry/traces/{trace_id}: + get: + responses: + '200': + description: A Trace. + content: + application/json: + schema: + $ref: '#/components/schemas/Trace' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Get a trace by its ID. + description: Get a trace by its ID. + parameters: + - name: trace_id + in: path + description: The ID of the trace to get. + required: true + schema: + type: string + deprecated: false + /v1alpha/telemetry/traces/{trace_id}/spans/{span_id}: + get: + responses: + '200': + description: A Span. + content: + application/json: + schema: + $ref: '#/components/schemas/Span' + '400': + $ref: '#/components/responses/BadRequest400' + '429': + $ref: >- + #/components/responses/TooManyRequests429 + '500': + $ref: >- + #/components/responses/InternalServerError500 + default: + $ref: '#/components/responses/DefaultError' + tags: + - Telemetry + summary: Get a span by its ID. 
+ description: Get a span by its ID. + parameters: + - name: trace_id + in: path + description: >- + The ID of the trace to get the span from. + required: true + schema: + type: string + - name: span_id + in: path + description: The ID of the span to get. + required: true + schema: + type: string + deprecated: false +jsonSchemaDialect: >- + https://json-schema.org/draft/2020-12/schema +components: + schemas: + Error: + type: object + properties: + status: + type: integer + description: HTTP status code + title: + type: string + description: >- + Error title, a short summary of the error which is invariant for an error + type + detail: + type: string + description: >- + Error detail, a longer human-readable description of the error + instance: + type: string + description: >- + (Optional) A URL which can be used to retrieve more information about + the specific occurrence of the error + additionalProperties: false + required: + - status + - title + - detail + title: Error + description: >- + Error response from the API. Roughly follows RFC 7807. + Order: + type: string + enum: + - asc + - desc + title: Order + description: Sort order for paginated responses. 
+ ListOpenAIChatCompletionResponse: + type: object + properties: + data: + type: array + items: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChoice' + description: List of choices + object: + type: string + const: chat.completion + default: chat.completion + description: >- + The object type, which will be "chat.completion" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + input_messages: + type: array + items: + $ref: '#/components/schemas/OpenAIMessageParam' + additionalProperties: false + required: + - id + - choices + - object + - created + - model + - input_messages + title: OpenAICompletionWithInputMessages + description: >- + List of chat completion objects with their input messages + has_more: + type: boolean + description: >- + Whether there are more completions available beyond this list + first_id: + type: string + description: ID of the first completion in this list + last_id: + type: string + description: ID of the last completion in this list + object: + type: string + const: list + default: list + description: >- + Must be "list" to identify this as a list response + additionalProperties: false + required: + - data + - has_more + - first_id + - last_id + - object + title: ListOpenAIChatCompletionResponse + description: >- + Response from listing OpenAI-compatible chat completions. 
+ OpenAIAssistantMessageParam: + type: object + properties: + role: + type: string + const: assistant + default: assistant + description: >- + Must be "assistant" to identify this as the model's response + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + description: The content of the model's response + name: + type: string + description: >- + (Optional) The name of the assistant message participant. + tool_calls: + type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionToolCall' + description: >- + List of tool calls. Each tool call is an OpenAIChatCompletionToolCall + object. + additionalProperties: false + required: + - role + title: OpenAIAssistantMessageParam + description: >- + A message containing the model's (assistant) response in an OpenAI-compatible + chat completion request. + "OpenAIChatCompletionContentPartImageParam": + type: object + properties: + type: + type: string + const: image_url + default: image_url + description: >- + Must be "image_url" to identify this as image content + image_url: + $ref: '#/components/schemas/OpenAIImageURL' + description: >- + Image URL specification and processing details + additionalProperties: false + required: + - type + - image_url + title: >- + OpenAIChatCompletionContentPartImageParam + description: >- + Image content part for OpenAI-compatible chat completion messages. 
+ OpenAIChatCompletionContentPartParam: + oneOf: + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartImageParam' + - $ref: '#/components/schemas/OpenAIFile' + discriminator: + propertyName: type + mapping: + text: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + image_url: '#/components/schemas/OpenAIChatCompletionContentPartImageParam' + file: '#/components/schemas/OpenAIFile' + OpenAIChatCompletionContentPartTextParam: + type: object + properties: + type: + type: string + const: text + default: text + description: >- + Must be "text" to identify this as text content + text: + type: string + description: The text content of the message + additionalProperties: false + required: + - type + - text + title: OpenAIChatCompletionContentPartTextParam + description: >- + Text content part for OpenAI-compatible chat completion messages. + OpenAIChatCompletionToolCall: + type: object + properties: + index: + type: integer + description: >- + (Optional) Index of the tool call in the list + id: + type: string + description: >- + (Optional) Unique identifier for the tool call + type: + type: string + const: function + default: function + description: >- + Must be "function" to identify this as a function call + function: + $ref: '#/components/schemas/OpenAIChatCompletionToolCallFunction' + description: (Optional) Function call details + additionalProperties: false + required: + - type + title: OpenAIChatCompletionToolCall + description: >- + Tool call specification for OpenAI-compatible chat completion responses. 
+ OpenAIChatCompletionToolCallFunction: + type: object + properties: + name: + type: string + description: (Optional) Name of the function to call + arguments: + type: string + description: >- + (Optional) Arguments to pass to the function as a JSON string + additionalProperties: false + title: OpenAIChatCompletionToolCallFunction + description: >- + Function call details for OpenAI-compatible tool calls. + OpenAIChoice: + type: object + properties: + message: + oneOf: + - $ref: '#/components/schemas/OpenAIUserMessageParam' + - $ref: '#/components/schemas/OpenAISystemMessageParam' + - $ref: '#/components/schemas/OpenAIAssistantMessageParam' + - $ref: '#/components/schemas/OpenAIToolMessageParam' + - $ref: '#/components/schemas/OpenAIDeveloperMessageParam' + discriminator: + propertyName: role + mapping: + user: '#/components/schemas/OpenAIUserMessageParam' + system: '#/components/schemas/OpenAISystemMessageParam' + assistant: '#/components/schemas/OpenAIAssistantMessageParam' + tool: '#/components/schemas/OpenAIToolMessageParam' + developer: '#/components/schemas/OpenAIDeveloperMessageParam' + description: The message from the model + finish_reason: + type: string + description: The reason the model stopped generating + index: + type: integer + description: The index of the choice + logprobs: + $ref: '#/components/schemas/OpenAIChoiceLogprobs' + description: >- + (Optional) The log probabilities for the tokens in the message + additionalProperties: false + required: + - message + - finish_reason + - index + title: OpenAIChoice + description: >- + A choice from an OpenAI-compatible chat completion response. 
+    OpenAIChoiceLogprobs:
+      type: object
+      properties:
+        content:
+          type: array
+          items:
+            $ref: '#/components/schemas/OpenAITokenLogProb'
+          description: >-
+            (Optional) The log probabilities for the tokens in the message
+        refusal:
+          type: array
+          items:
+            $ref: '#/components/schemas/OpenAITokenLogProb'
+          description: >-
+            (Optional) The log probabilities for the tokens in the refusal message
+      additionalProperties: false
+      title: OpenAIChoiceLogprobs
+      description: >-
+        The log probabilities for the tokens in the message from an OpenAI-compatible
+        chat completion response.
+    OpenAIDeveloperMessageParam:
+      type: object
+      properties:
+        role:
+          type: string
+          const: developer
+          default: developer
+          description: >-
+            Must be "developer" to identify this as a developer message
+        content:
+          oneOf:
+            - type: string
+            - type: array
+              items:
+                $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam'
+          description: The content of the developer message
+        name:
+          type: string
+          description: >-
+            (Optional) The name of the developer message participant.
+      additionalProperties: false
+      required:
+        - role
+        - content
+      title: OpenAIDeveloperMessageParam
+      description: >-
+        A message from the developer in an OpenAI-compatible chat completion request.
+    OpenAIFile:
+      type: object
+      properties:
+        type:
+          type: string
+          const: file
+          default: file
+        file:
+          $ref: '#/components/schemas/OpenAIFileFile'
+      additionalProperties: false
+      required:
+        - type
+        - file
+      title: OpenAIFile
+    OpenAIFileFile:
+      type: object
+      properties:
+        file_data:
+          type: string
+        file_id:
+          type: string
+        filename:
+          type: string
+      additionalProperties: false
+      title: OpenAIFileFile
+    OpenAIImageURL:
+      type: object
+      properties:
+        url:
+          type: string
+          description: >-
+            URL of the image to include in the message
+        detail:
+          type: string
+          description: >-
+            (Optional) Level of detail for image processing. Can be "low", "high",
+            or "auto"
+      additionalProperties: false
+      required:
+        - url
+      title: OpenAIImageURL
+      description: >-
+        Image URL specification for OpenAI-compatible chat completion messages.
+    OpenAIMessageParam:
+      oneOf:
+        - $ref: '#/components/schemas/OpenAIUserMessageParam'
+        - $ref: '#/components/schemas/OpenAISystemMessageParam'
+        - $ref: '#/components/schemas/OpenAIAssistantMessageParam'
+        - $ref: '#/components/schemas/OpenAIToolMessageParam'
+        - $ref: '#/components/schemas/OpenAIDeveloperMessageParam'
+      discriminator:
+        propertyName: role
+        mapping:
+          user: '#/components/schemas/OpenAIUserMessageParam'
+          system: '#/components/schemas/OpenAISystemMessageParam'
+          assistant: '#/components/schemas/OpenAIAssistantMessageParam'
+          tool: '#/components/schemas/OpenAIToolMessageParam'
+          developer: '#/components/schemas/OpenAIDeveloperMessageParam'
+    OpenAISystemMessageParam:
+      type: object
+      properties:
+        role:
+          type: string
+          const: system
+          default: system
+          description: >-
+            Must be "system" to identify this as a system message
+        content:
+          oneOf:
+            - type: string
+            - type: array
+              items:
+                $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam'
+          description: >-
+            The content of the "system prompt". If multiple system messages are provided,
+            they are concatenated. The underlying Llama Stack code may also add other
+            system messages (for example, for formatting tool definitions).
+        name:
+          type: string
+          description: >-
+            (Optional) The name of the system message participant.
+      additionalProperties: false
+      required:
+        - role
+        - content
+      title: OpenAISystemMessageParam
+      description: >-
+        A system message providing instructions or context to the model.
+ OpenAITokenLogProb: + type: object + properties: + token: + type: string + bytes: + type: array + items: + type: integer + logprob: + type: number + top_logprobs: + type: array + items: + $ref: '#/components/schemas/OpenAITopLogProb' + additionalProperties: false + required: + - token + - logprob + - top_logprobs + title: OpenAITokenLogProb + description: >- + The log probability for a token from an OpenAI-compatible chat completion + response. + OpenAIToolMessageParam: + type: object + properties: + role: + type: string + const: tool + default: tool + description: >- + Must be "tool" to identify this as a tool response + tool_call_id: + type: string + description: >- + Unique identifier for the tool call this response is for + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + description: The response content from the tool + additionalProperties: false + required: + - role + - tool_call_id + - content + title: OpenAIToolMessageParam + description: >- + A message representing the result of a tool invocation in an OpenAI-compatible + chat completion request. + OpenAITopLogProb: + type: object + properties: + token: + type: string + bytes: + type: array + items: + type: integer + logprob: + type: number + additionalProperties: false + required: + - token + - logprob + title: OpenAITopLogProb + description: >- + The top log probability for a token from an OpenAI-compatible chat completion + response. + OpenAIUserMessageParam: + type: object + properties: + role: + type: string + const: user + default: user + description: >- + Must be "user" to identify this as a user message + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionContentPartParam' + description: >- + The content of the message, which can include text and other media + name: + type: string + description: >- + (Optional) The name of the user message participant. 
+ additionalProperties: false + required: + - role + - content + title: OpenAIUserMessageParam + description: >- + A message from the user in an OpenAI-compatible chat completion request. + OpenAIJSONSchema: + type: object + properties: + name: + type: string + description: Name of the schema + description: + type: string + description: (Optional) Description of the schema + strict: + type: boolean + description: >- + (Optional) Whether to enforce strict adherence to the schema + schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The JSON schema definition + additionalProperties: false + required: + - name + title: OpenAIJSONSchema + description: >- + JSON schema specification for OpenAI-compatible structured response format. + OpenAIResponseFormatJSONObject: + type: object + properties: + type: + type: string + const: json_object + default: json_object + description: >- + Must be "json_object" to indicate generic JSON object response format + additionalProperties: false + required: + - type + title: OpenAIResponseFormatJSONObject + description: >- + JSON object response format for OpenAI-compatible chat completion requests. + OpenAIResponseFormatJSONSchema: + type: object + properties: + type: + type: string + const: json_schema + default: json_schema + description: >- + Must be "json_schema" to indicate structured JSON response format + json_schema: + $ref: '#/components/schemas/OpenAIJSONSchema' + description: >- + The JSON schema specification for the response + additionalProperties: false + required: + - type + - json_schema + title: OpenAIResponseFormatJSONSchema + description: >- + JSON schema response format for OpenAI-compatible chat completion requests. 
+ OpenAIResponseFormatParam: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseFormatText' + - $ref: '#/components/schemas/OpenAIResponseFormatJSONSchema' + - $ref: '#/components/schemas/OpenAIResponseFormatJSONObject' + discriminator: + propertyName: type + mapping: + text: '#/components/schemas/OpenAIResponseFormatText' + json_schema: '#/components/schemas/OpenAIResponseFormatJSONSchema' + json_object: '#/components/schemas/OpenAIResponseFormatJSONObject' + OpenAIResponseFormatText: + type: object + properties: + type: + type: string + const: text + default: text + description: >- + Must be "text" to indicate plain text response format + additionalProperties: false + required: + - type + title: OpenAIResponseFormatText + description: >- + Text response format for OpenAI-compatible chat completion requests. + OpenaiChatCompletionRequest: + type: object + properties: + model: + type: string + description: >- + The identifier of the model to use. The model must be registered with + Llama Stack and available via the /models endpoint. + messages: + type: array + items: + $ref: '#/components/schemas/OpenAIMessageParam' + description: List of messages in the conversation. + frequency_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + function_call: + oneOf: + - type: string + - type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The function call to use. + functions: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) List of functions to use. + logit_bias: + type: object + additionalProperties: + type: number + description: (Optional) The logit bias to use. + logprobs: + type: boolean + description: (Optional) The log probabilities to use. 
+ max_completion_tokens: + type: integer + description: >- + (Optional) The maximum number of tokens to generate. + max_tokens: + type: integer + description: >- + (Optional) The maximum number of tokens to generate. + n: + type: integer + description: >- + (Optional) The number of completions to generate. + parallel_tool_calls: + type: boolean + description: >- + (Optional) Whether to parallelize tool calls. + presence_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + response_format: + $ref: '#/components/schemas/OpenAIResponseFormatParam' + description: (Optional) The response format to use. + seed: + type: integer + description: (Optional) The seed to use. + stop: + oneOf: + - type: string + - type: array + items: + type: string + description: (Optional) The stop tokens to use. + stream: + type: boolean + description: >- + (Optional) Whether to stream the response. + stream_options: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The stream options to use. + temperature: + type: number + description: (Optional) The temperature to use. + tool_choice: + oneOf: + - type: string + - type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The tool choice to use. + tools: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The tools to use. + top_logprobs: + type: integer + description: >- + (Optional) The top log probabilities to use. + top_p: + type: number + description: (Optional) The top p to use. + user: + type: string + description: (Optional) The user to use. 
+ additionalProperties: false + required: + - model + - messages + title: OpenaiChatCompletionRequest + OpenAIChatCompletion: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChoice' + description: List of choices + object: + type: string + const: chat.completion + default: chat.completion + description: >- + The object type, which will be "chat.completion" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + additionalProperties: false + required: + - id + - choices + - object + - created + - model + title: OpenAIChatCompletion + description: >- + Response from an OpenAI-compatible chat completion request. + OpenAIChatCompletionChunk: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChunkChoice' + description: List of choices + object: + type: string + const: chat.completion.chunk + default: chat.completion.chunk + description: >- + The object type, which will be "chat.completion.chunk" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + additionalProperties: false + required: + - id + - choices + - object + - created + - model + title: OpenAIChatCompletionChunk + description: >- + Chunk from a streaming response to an OpenAI-compatible chat completion request. 
+ OpenAIChoiceDelta: + type: object + properties: + content: + type: string + description: (Optional) The content of the delta + refusal: + type: string + description: (Optional) The refusal of the delta + role: + type: string + description: (Optional) The role of the delta + tool_calls: + type: array + items: + $ref: '#/components/schemas/OpenAIChatCompletionToolCall' + description: (Optional) The tool calls of the delta + additionalProperties: false + title: OpenAIChoiceDelta + description: >- + A delta from an OpenAI-compatible chat completion streaming response. + OpenAIChunkChoice: + type: object + properties: + delta: + $ref: '#/components/schemas/OpenAIChoiceDelta' + description: The delta from the chunk + finish_reason: + type: string + description: The reason the model stopped generating + index: + type: integer + description: The index of the choice + logprobs: + $ref: '#/components/schemas/OpenAIChoiceLogprobs' + description: >- + (Optional) The log probabilities for the tokens in the message + additionalProperties: false + required: + - delta + - finish_reason + - index + title: OpenAIChunkChoice + description: >- + A chunk choice from an OpenAI-compatible chat completion streaming response. 
+ OpenAICompletionWithInputMessages: + type: object + properties: + id: + type: string + description: The ID of the chat completion + choices: + type: array + items: + $ref: '#/components/schemas/OpenAIChoice' + description: List of choices + object: + type: string + const: chat.completion + default: chat.completion + description: >- + The object type, which will be "chat.completion" + created: + type: integer + description: >- + The Unix timestamp in seconds when the chat completion was created + model: + type: string + description: >- + The model that was used to generate the chat completion + input_messages: + type: array + items: + $ref: '#/components/schemas/OpenAIMessageParam' + additionalProperties: false + required: + - id + - choices + - object + - created + - model + - input_messages + title: OpenAICompletionWithInputMessages + OpenaiCompletionRequest: + type: object + properties: + model: + type: string + description: >- + The identifier of the model to use. The model must be registered with + Llama Stack and available via the /models endpoint. + prompt: + oneOf: + - type: string + - type: array + items: + type: string + - type: array + items: + type: integer + - type: array + items: + type: array + items: + type: integer + description: The prompt to generate a completion for. + best_of: + type: integer + description: >- + (Optional) The number of completions to generate. + echo: + type: boolean + description: (Optional) Whether to echo the prompt. + frequency_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + logit_bias: + type: object + additionalProperties: + type: number + description: (Optional) The logit bias to use. + logprobs: + type: boolean + description: (Optional) The log probabilities to use. + max_tokens: + type: integer + description: >- + (Optional) The maximum number of tokens to generate. + n: + type: integer + description: >- + (Optional) The number of completions to generate. 
+ presence_penalty: + type: number + description: >- + (Optional) The penalty for repeated tokens. + seed: + type: integer + description: (Optional) The seed to use. + stop: + oneOf: + - type: string + - type: array + items: + type: string + description: (Optional) The stop tokens to use. + stream: + type: boolean + description: >- + (Optional) Whether to stream the response. + stream_options: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: (Optional) The stream options to use. + temperature: + type: number + description: (Optional) The temperature to use. + top_p: + type: number + description: (Optional) The top p to use. + user: + type: string + description: (Optional) The user to use. + guided_choice: + type: array + items: + type: string + prompt_logprobs: + type: integer + suffix: + type: string + description: >- + (Optional) The suffix that should be appended to the completion. + additionalProperties: false + required: + - model + - prompt + title: OpenaiCompletionRequest + OpenAICompletion: + type: object + properties: + id: + type: string + choices: + type: array + items: + $ref: '#/components/schemas/OpenAICompletionChoice' + created: + type: integer + model: + type: string + object: + type: string + const: text_completion + default: text_completion + additionalProperties: false + required: + - id + - choices + - created + - model + - object + title: OpenAICompletion + description: >- + Response from an OpenAI-compatible completion request. + OpenAICompletionChoice: + type: object + properties: + finish_reason: + type: string + text: + type: string + index: + type: integer + logprobs: + $ref: '#/components/schemas/OpenAIChoiceLogprobs' + additionalProperties: false + required: + - finish_reason + - text + - index + title: OpenAICompletionChoice + description: >- + A choice from an OpenAI-compatible completion response. 
+ OpenaiEmbeddingsRequest: + type: object + properties: + model: + type: string + description: >- + The identifier of the model to use. The model must be an embedding model + registered with Llama Stack and available via the /models endpoint. + input: + oneOf: + - type: string + - type: array + items: + type: string + description: >- + Input text to embed, encoded as a string or array of strings. To embed + multiple inputs in a single request, pass an array of strings. + encoding_format: + type: string + description: >- + (Optional) The format to return the embeddings in. Can be either "float" + or "base64". Defaults to "float". + dimensions: + type: integer + description: >- + (Optional) The number of dimensions the resulting output embeddings should + have. Only supported in text-embedding-3 and later models. + user: + type: string + description: >- + (Optional) A unique identifier representing your end-user, which can help + OpenAI to monitor and detect abuse. + additionalProperties: false + required: + - model + - input + title: OpenaiEmbeddingsRequest + OpenAIEmbeddingData: + type: object + properties: + object: + type: string + const: embedding + default: embedding + description: >- + The object type, which will be "embedding" + embedding: + oneOf: + - type: array + items: + type: number + - type: string + description: >- + The embedding vector as a list of floats (when encoding_format="float") + or as a base64-encoded string (when encoding_format="base64") + index: + type: integer + description: >- + The index of the embedding in the input list + additionalProperties: false + required: + - object + - embedding + - index + title: OpenAIEmbeddingData + description: >- + A single embedding data object from an OpenAI-compatible embeddings response. 
+ OpenAIEmbeddingUsage: + type: object + properties: + prompt_tokens: + type: integer + description: The number of tokens in the input + total_tokens: + type: integer + description: The total number of tokens used + additionalProperties: false + required: + - prompt_tokens + - total_tokens + title: OpenAIEmbeddingUsage + description: >- + Usage information for an OpenAI-compatible embeddings response. + OpenAIEmbeddingsResponse: + type: object + properties: + object: + type: string + const: list + default: list + description: The object type, which will be "list" + data: + type: array + items: + $ref: '#/components/schemas/OpenAIEmbeddingData' + description: List of embedding data objects + model: + type: string + description: >- + The model that was used to generate the embeddings + usage: + $ref: '#/components/schemas/OpenAIEmbeddingUsage' + description: Usage information + additionalProperties: false + required: + - object + - data + - model + - usage + title: OpenAIEmbeddingsResponse + description: >- + Response from an OpenAI-compatible embeddings request. + OpenAIFilePurpose: + type: string + enum: + - assistants + - batch + title: OpenAIFilePurpose + description: >- + Valid purpose values for OpenAI Files API. 
+ ListOpenAIFileResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/OpenAIFileObject' + description: List of file objects + has_more: + type: boolean + description: >- + Whether there are more files available beyond this page + first_id: + type: string + description: >- + ID of the first file in the list for pagination + last_id: + type: string + description: >- + ID of the last file in the list for pagination + object: + type: string + const: list + default: list + description: The object type, which is always "list" + additionalProperties: false + required: + - data + - has_more + - first_id + - last_id + - object + title: ListOpenAIFileResponse + description: >- + Response for listing files in OpenAI Files API. + OpenAIFileObject: + type: object + properties: + object: + type: string + const: file + default: file + description: The object type, which is always "file" + id: + type: string + description: >- + The file identifier, which can be referenced in the API endpoints + bytes: + type: integer + description: The size of the file, in bytes + created_at: + type: integer + description: >- + The Unix timestamp (in seconds) for when the file was created + expires_at: + type: integer + description: >- + The Unix timestamp (in seconds) for when the file expires + filename: + type: string + description: The name of the file + purpose: + type: string + enum: + - assistants + - batch + description: The intended purpose of the file + additionalProperties: false + required: + - object + - id + - bytes + - created_at + - expires_at + - filename + - purpose + title: OpenAIFileObject + description: >- + OpenAI File object as defined in the OpenAI Files API. + ExpiresAfter: + type: object + properties: + anchor: + type: string + const: created_at + seconds: + type: integer + additionalProperties: false + required: + - anchor + - seconds + title: ExpiresAfter + description: >- + Control expiration of uploaded files. 
+ + Params: + - anchor, must be "created_at" + - seconds, must be int between 3600 and 2592000 (1 hour to 30 days) + OpenAIFileDeleteResponse: + type: object + properties: + id: + type: string + description: The file identifier that was deleted + object: + type: string + const: file + default: file + description: The object type, which is always "file" + deleted: + type: boolean + description: >- + Whether the file was successfully deleted + additionalProperties: false + required: + - id + - object + - deleted + title: OpenAIFileDeleteResponse + description: >- + Response for deleting a file in OpenAI Files API. + Response: + type: object + title: Response + HealthInfo: + type: object + properties: + status: + type: string + enum: + - OK + - Error + - Not Implemented + description: Current health status of the service + additionalProperties: false + required: + - status + title: HealthInfo + description: >- + Health status information for the service. + RouteInfo: + type: object + properties: + route: + type: string + description: The API endpoint path + method: + type: string + description: HTTP method for the route + provider_types: + type: array + items: + type: string + description: >- + List of provider types that implement this route + additionalProperties: false + required: + - route + - method + - provider_types + title: RouteInfo + description: >- + Information about an API route including its path, method, and implementing + providers. + ListRoutesResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/RouteInfo' + description: >- + List of available route information objects + additionalProperties: false + required: + - data + title: ListRoutesResponse + description: >- + Response containing a list of all available API routes. 
+ Model: + type: object + properties: + identifier: + type: string + description: >- + Unique identifier for this resource in llama stack + provider_resource_id: + type: string + description: >- + Unique identifier for this resource in the provider + provider_id: + type: string + description: >- + ID of the provider that owns this resource + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: model + default: model + description: >- + The resource type, always 'model' for model resources + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: Any additional metadata for this model + model_type: + $ref: '#/components/schemas/ModelType' + default: llm + description: >- + The type of model (LLM or embedding model) + additionalProperties: false + required: + - identifier + - provider_id + - type + - metadata + - model_type + title: Model + description: >- + A model resource representing an AI model registered in Llama Stack. + ModelType: + type: string + enum: + - llm + - embedding + title: ModelType + description: >- + Enumeration of supported model types in Llama Stack. + ListModelsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/Model' + additionalProperties: false + required: + - data + title: ListModelsResponse + RegisterModelRequest: + type: object + properties: + model_id: + type: string + description: The identifier of the model to register. + provider_model_id: + type: string + description: >- + The identifier of the model in the provider. + provider_id: + type: string + description: The identifier of the provider. 
+ metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: Any additional metadata for this model. + model_type: + $ref: '#/components/schemas/ModelType' + description: The type of model to register. + additionalProperties: false + required: + - model_id + title: RegisterModelRequest + RunModerationRequest: + type: object + properties: + input: + oneOf: + - type: string + - type: array + items: + type: string + description: >- + Input (or inputs) to classify. Can be a single string, an array of strings, + or an array of multi-modal input objects similar to other models. + model: + type: string + description: >- + The content moderation model you would like to use. + additionalProperties: false + required: + - input + - model + title: RunModerationRequest + ModerationObject: + type: object + properties: + id: + type: string + description: >- + The unique identifier for the moderation request. + model: + type: string + description: >- + The model used to generate the moderation results. + results: + type: array + items: + $ref: '#/components/schemas/ModerationObjectResults' + description: A list of moderation objects + additionalProperties: false + required: + - id + - model + - results + title: ModerationObject + description: A moderation object. + ModerationObjectResults: + type: object + properties: + flagged: + type: boolean + description: >- + Whether any of the below categories are flagged. + categories: + type: object + additionalProperties: + type: boolean + description: >- + A list of the categories, and whether they are flagged or not. + category_applied_input_types: + type: object + additionalProperties: + type: array + items: + type: string + description: >- + A list of the categories along with the input type(s) that the score applies + to. 
+        category_scores:
+          type: object
+          additionalProperties:
+            type: number
+          description: >-
+            A list of the categories along with their scores as predicted by the
+            model.
+        user_message:
+          type: string
+        metadata:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+      additionalProperties: false
+      required:
+        - flagged
+        - metadata
+      title: ModerationObjectResults
+      description: A single moderation result object.
+    Prompt:
+      type: object
+      properties:
+        prompt:
+          type: string
+          description: >-
+            The system prompt text with variable placeholders. Variables are only
+            supported when using the Responses API.
+        version:
+          type: integer
+          description: >-
+            Version (integer starting at 1, incremented on save)
+        prompt_id:
+          type: string
+          description: >-
+            Unique identifier formatted as 'pmpt_<48-digit-hash>'
+        variables:
+          type: array
+          items:
+            type: string
+          description: >-
+            List of prompt variable names that can be used in the prompt template
+        is_default:
+          type: boolean
+          default: false
+          description: >-
+            Boolean indicating whether this version is the default version for this
+            prompt
+      additionalProperties: false
+      required:
+        - version
+        - prompt_id
+        - variables
+        - is_default
+      title: Prompt
+      description: >-
+        A prompt resource representing a stored OpenAI Compatible prompt template
+        in Llama Stack.
+    ListPromptsResponse:
+      type: object
+      properties:
+        data:
+          type: array
+          items:
+            $ref: '#/components/schemas/Prompt'
+      additionalProperties: false
+      required:
+        - data
+      title: ListPromptsResponse
+      description: Response model to list prompts.
+    CreatePromptRequest:
+      type: object
+      properties:
+        prompt:
+          type: string
+          description: >-
+            The prompt text content with variable placeholders.
+        variables:
+          type: array
+          items:
+            type: string
+          description: >-
+            List of variable names that can be used in the prompt template.
+ additionalProperties: false + required: + - prompt + title: CreatePromptRequest + UpdatePromptRequest: + type: object + properties: + prompt: + type: string + description: The updated prompt text content. + version: + type: integer + description: >- + The current version of the prompt being updated. + variables: + type: array + items: + type: string + description: >- + Updated list of variable names that can be used in the prompt template. + set_as_default: + type: boolean + description: >- + Set the new version as the default (default=True). + additionalProperties: false + required: + - prompt + - version + - set_as_default + title: UpdatePromptRequest + SetDefaultVersionRequest: + type: object + properties: + version: + type: integer + description: The version to set as default. + additionalProperties: false + required: + - version + title: SetDefaultVersionRequest + ProviderInfo: + type: object + properties: + api: + type: string + description: The API name this provider implements + provider_id: + type: string + description: Unique identifier for the provider + provider_type: + type: string + description: The type of provider implementation + config: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Configuration parameters for the provider + health: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: Current health status of the provider + additionalProperties: false + required: + - api + - provider_id + - provider_type + - config + - health + title: ProviderInfo + description: >- + Information about a registered provider including its configuration and health + status. 
+ ListProvidersResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/ProviderInfo' + description: List of provider information objects + additionalProperties: false + required: + - data + title: ListProvidersResponse + description: >- + Response containing a list of all available providers. + ListOpenAIResponseObject: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseObjectWithInput' + description: >- + List of response objects with their input context + has_more: + type: boolean + description: >- + Whether there are more results available beyond this page + first_id: + type: string + description: >- + Identifier of the first item in this page + last_id: + type: string + description: Identifier of the last item in this page + object: + type: string + const: list + default: list + description: Object type identifier, always "list" + additionalProperties: false + required: + - data + - has_more + - first_id + - last_id + - object + title: ListOpenAIResponseObject + description: >- + Paginated list of OpenAI response objects with navigation metadata. + OpenAIResponseAnnotationCitation: + type: object + properties: + type: + type: string + const: url_citation + default: url_citation + description: >- + Annotation type identifier, always "url_citation" + end_index: + type: integer + description: >- + End position of the citation span in the content + start_index: + type: integer + description: >- + Start position of the citation span in the content + title: + type: string + description: Title of the referenced web resource + url: + type: string + description: URL of the referenced web resource + additionalProperties: false + required: + - type + - end_index + - start_index + - title + - url + title: OpenAIResponseAnnotationCitation + description: >- + URL citation annotation for referencing external web resources. 
+ "OpenAIResponseAnnotationContainerFileCitation": + type: object + properties: + type: + type: string + const: container_file_citation + default: container_file_citation + container_id: + type: string + end_index: + type: integer + file_id: + type: string + filename: + type: string + start_index: + type: integer + additionalProperties: false + required: + - type + - container_id + - end_index + - file_id + - filename + - start_index + title: >- + OpenAIResponseAnnotationContainerFileCitation + OpenAIResponseAnnotationFileCitation: + type: object + properties: + type: + type: string + const: file_citation + default: file_citation + description: >- + Annotation type identifier, always "file_citation" + file_id: + type: string + description: Unique identifier of the referenced file + filename: + type: string + description: Name of the referenced file + index: + type: integer + description: >- + Position index of the citation within the content + additionalProperties: false + required: + - type + - file_id + - filename + - index + title: OpenAIResponseAnnotationFileCitation + description: >- + File citation annotation for referencing specific files in response content. 
+ OpenAIResponseAnnotationFilePath: + type: object + properties: + type: + type: string + const: file_path + default: file_path + file_id: + type: string + index: + type: integer + additionalProperties: false + required: + - type + - file_id + - index + title: OpenAIResponseAnnotationFilePath + OpenAIResponseAnnotations: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseAnnotationFileCitation' + - $ref: '#/components/schemas/OpenAIResponseAnnotationCitation' + - $ref: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation' + - $ref: '#/components/schemas/OpenAIResponseAnnotationFilePath' + discriminator: + propertyName: type + mapping: + file_citation: '#/components/schemas/OpenAIResponseAnnotationFileCitation' + url_citation: '#/components/schemas/OpenAIResponseAnnotationCitation' + container_file_citation: '#/components/schemas/OpenAIResponseAnnotationContainerFileCitation' + file_path: '#/components/schemas/OpenAIResponseAnnotationFilePath' + OpenAIResponseError: + type: object + properties: + code: + type: string + description: >- + Error code identifying the type of failure + message: + type: string + description: >- + Human-readable error message describing the failure + additionalProperties: false + required: + - code + - message + title: OpenAIResponseError + description: >- + Error details for failed OpenAI response requests. 
+ OpenAIResponseInput: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseInputFunctionToolCallOutput' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalResponse' + - $ref: '#/components/schemas/OpenAIResponseMessage' + "OpenAIResponseInputFunctionToolCallOutput": + type: object + properties: + call_id: + type: string + output: + type: string + type: + type: string + const: function_call_output + default: function_call_output + id: + type: string + status: + type: string + additionalProperties: false + required: + - call_id + - output + - type + title: >- + OpenAIResponseInputFunctionToolCallOutput + description: >- + This represents the output of a function call that gets passed back to the + model. 
+ OpenAIResponseInputMessageContent: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseInputMessageContentText' + - $ref: '#/components/schemas/OpenAIResponseInputMessageContentImage' + discriminator: + propertyName: type + mapping: + input_text: '#/components/schemas/OpenAIResponseInputMessageContentText' + input_image: '#/components/schemas/OpenAIResponseInputMessageContentImage' + OpenAIResponseInputMessageContentImage: + type: object + properties: + detail: + oneOf: + - type: string + const: low + - type: string + const: high + - type: string + const: auto + default: auto + description: >- + Level of detail for image processing, can be "low", "high", or "auto" + type: + type: string + const: input_image + default: input_image + description: >- + Content type identifier, always "input_image" + image_url: + type: string + description: (Optional) URL of the image content + additionalProperties: false + required: + - detail + - type + title: OpenAIResponseInputMessageContentImage + description: >- + Image content for input messages in OpenAI response format. + OpenAIResponseInputMessageContentText: + type: object + properties: + text: + type: string + description: The text content of the input message + type: + type: string + const: input_text + default: input_text + description: >- + Content type identifier, always "input_text" + additionalProperties: false + required: + - text + - type + title: OpenAIResponseInputMessageContentText + description: >- + Text content for input messages in OpenAI response format. 
+ OpenAIResponseMCPApprovalRequest: + type: object + properties: + arguments: + type: string + id: + type: string + name: + type: string + server_label: + type: string + type: + type: string + const: mcp_approval_request + default: mcp_approval_request + additionalProperties: false + required: + - arguments + - id + - name + - server_label + - type + title: OpenAIResponseMCPApprovalRequest + description: >- + A request for human approval of a tool invocation. + OpenAIResponseMCPApprovalResponse: + type: object + properties: + approval_request_id: + type: string + approve: + type: boolean + type: + type: string + const: mcp_approval_response + default: mcp_approval_response + id: + type: string + reason: + type: string + additionalProperties: false + required: + - approval_request_id + - approve + - type + title: OpenAIResponseMCPApprovalResponse + description: A response to an MCP approval request. + OpenAIResponseMessage: + type: object + properties: + content: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIResponseInputMessageContent' + - type: array + items: + $ref: '#/components/schemas/OpenAIResponseOutputMessageContent' + role: + oneOf: + - type: string + const: system + - type: string + const: developer + - type: string + const: user + - type: string + const: assistant + type: + type: string + const: message + default: message + id: + type: string + status: + type: string + additionalProperties: false + required: + - content + - role + - type + title: OpenAIResponseMessage + description: >- + Corresponds to the various Message types in the Responses API. They are all + under one type because the Responses API gives them all the same "type" value, + and there is no way to tell them apart in certain scenarios. 
+ OpenAIResponseObjectWithInput: + type: object + properties: + created_at: + type: integer + description: >- + Unix timestamp when the response was created + error: + $ref: '#/components/schemas/OpenAIResponseError' + description: >- + (Optional) Error details if the response generation failed + id: + type: string + description: Unique identifier for this response + model: + type: string + description: Model identifier used for generation + object: + type: string + const: response + default: response + description: >- + Object type identifier, always "response" + output: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseOutput' + description: >- + List of generated output items (messages, tool calls, etc.) + parallel_tool_calls: + type: boolean + default: false + description: >- + Whether tool calls can be executed in parallel + previous_response_id: + type: string + description: >- + (Optional) ID of the previous response in a conversation + status: + type: string + description: >- + Current status of the response generation + temperature: + type: number + description: >- + (Optional) Sampling temperature used for generation + text: + $ref: '#/components/schemas/OpenAIResponseText' + description: >- + Text formatting configuration for the response + top_p: + type: number + description: >- + (Optional) Nucleus sampling parameter used for generation + truncation: + type: string + description: >- + (Optional) Truncation strategy applied to the response + input: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseInput' + description: >- + List of input items that led to this response + additionalProperties: false + required: + - created_at + - id + - model + - object + - output + - parallel_tool_calls + - status + - text + - input + title: OpenAIResponseObjectWithInput + description: >- + OpenAI response object extended with input context information. 
+ OpenAIResponseOutput: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseMessage' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + discriminator: + propertyName: type + mapping: + message: '#/components/schemas/OpenAIResponseMessage' + web_search_call: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + OpenAIResponseOutputMessageContent: + type: object + properties: + text: + type: string + type: + type: string + const: output_text + default: output_text + annotations: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseAnnotations' + additionalProperties: false + required: + - text + - type + - annotations + title: >- + OpenAIResponseOutputMessageContentOutputText + "OpenAIResponseOutputMessageFileSearchToolCall": + type: object + properties: + id: + type: string + description: Unique identifier for this tool call + queries: + type: array + items: + type: string + description: List of search queries executed + status: + type: string + description: >- + Current status of the file search operation + type: + type: string + const: file_search_call + default: file_search_call + description: >- + Tool call type identifier, always "file_search_call" 
+ results: + type: array + items: + type: object + properties: + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Key-value attributes associated with the file + file_id: + type: string + description: >- + Unique identifier of the file containing the result + filename: + type: string + description: Name of the file containing the result + score: + type: number + description: >- + Relevance score for this search result (between 0 and 1) + text: + type: string + description: Text content of the search result + additionalProperties: false + required: + - attributes + - file_id + - filename + - score + - text + title: >- + OpenAIResponseOutputMessageFileSearchToolCallResults + description: >- + Search results returned by the file search operation. + description: >- + (Optional) Search results returned by the file search operation + additionalProperties: false + required: + - id + - queries + - status + - type + title: >- + OpenAIResponseOutputMessageFileSearchToolCall + description: >- + File search tool call output message for OpenAI responses. 
+ "OpenAIResponseOutputMessageFunctionToolCall": + type: object + properties: + call_id: + type: string + description: Unique identifier for the function call + name: + type: string + description: Name of the function being called + arguments: + type: string + description: >- + JSON string containing the function arguments + type: + type: string + const: function_call + default: function_call + description: >- + Tool call type identifier, always "function_call" + id: + type: string + description: >- + (Optional) Additional identifier for the tool call + status: + type: string + description: >- + (Optional) Current status of the function call execution + additionalProperties: false + required: + - call_id + - name + - arguments + - type + title: >- + OpenAIResponseOutputMessageFunctionToolCall + description: >- + Function tool call output message for OpenAI responses. + OpenAIResponseOutputMessageMCPCall: + type: object + properties: + id: + type: string + description: Unique identifier for this MCP call + type: + type: string + const: mcp_call + default: mcp_call + description: >- + Tool call type identifier, always "mcp_call" + arguments: + type: string + description: >- + JSON string containing the MCP call arguments + name: + type: string + description: Name of the MCP method being called + server_label: + type: string + description: >- + Label identifying the MCP server handling the call + error: + type: string + description: >- + (Optional) Error message if the MCP call failed + output: + type: string + description: >- + (Optional) Output result from the successful MCP call + additionalProperties: false + required: + - id + - type + - arguments + - name + - server_label + title: OpenAIResponseOutputMessageMCPCall + description: >- + Model Context Protocol (MCP) call output message for OpenAI responses. 
+ OpenAIResponseOutputMessageMCPListTools: + type: object + properties: + id: + type: string + description: >- + Unique identifier for this MCP list tools operation + type: + type: string + const: mcp_list_tools + default: mcp_list_tools + description: >- + Tool call type identifier, always "mcp_list_tools" + server_label: + type: string + description: >- + Label identifying the MCP server providing the tools + tools: + type: array + items: + type: object + properties: + input_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + JSON schema defining the tool's input parameters + name: + type: string + description: Name of the tool + description: + type: string + description: >- + (Optional) Description of what the tool does + additionalProperties: false + required: + - input_schema + - name + title: MCPListToolsTool + description: >- + Tool definition returned by MCP list tools operation. + description: >- + List of available tools provided by the MCP server + additionalProperties: false + required: + - id + - type + - server_label + - tools + title: OpenAIResponseOutputMessageMCPListTools + description: >- + MCP list tools output message containing available tools from an MCP server. + "OpenAIResponseOutputMessageWebSearchToolCall": + type: object + properties: + id: + type: string + description: Unique identifier for this tool call + status: + type: string + description: >- + Current status of the web search operation + type: + type: string + const: web_search_call + default: web_search_call + description: >- + Tool call type identifier, always "web_search_call" + additionalProperties: false + required: + - id + - status + - type + title: >- + OpenAIResponseOutputMessageWebSearchToolCall + description: >- + Web search tool call output message for OpenAI responses. 
+ OpenAIResponseText: + type: object + properties: + format: + type: object + properties: + type: + oneOf: + - type: string + const: text + - type: string + const: json_schema + - type: string + const: json_object + description: >- + Must be "text", "json_schema", or "json_object" to identify the format + type + name: + type: string + description: >- + The name of the response format. Only used for json_schema. + schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The JSON schema the response should conform to. In a Python SDK, this + is often a `pydantic` model. Only used for json_schema. + description: + type: string + description: >- + (Optional) A description of the response format. Only used for json_schema. + strict: + type: boolean + description: >- + (Optional) Whether to strictly enforce the JSON schema. If true, the + response must match the schema exactly. Only used for json_schema. + additionalProperties: false + required: + - type + description: >- + (Optional) Text format configuration specifying output format requirements + additionalProperties: false + title: OpenAIResponseText + description: >- + Text response configuration for OpenAI responses. 
+ OpenAIResponseInputTool: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseInputToolWebSearch' + - $ref: '#/components/schemas/OpenAIResponseInputToolFileSearch' + - $ref: '#/components/schemas/OpenAIResponseInputToolFunction' + - $ref: '#/components/schemas/OpenAIResponseInputToolMCP' + discriminator: + propertyName: type + mapping: + web_search: '#/components/schemas/OpenAIResponseInputToolWebSearch' + file_search: '#/components/schemas/OpenAIResponseInputToolFileSearch' + function: '#/components/schemas/OpenAIResponseInputToolFunction' + mcp: '#/components/schemas/OpenAIResponseInputToolMCP' + OpenAIResponseInputToolFileSearch: + type: object + properties: + type: + type: string + const: file_search + default: file_search + description: >- + Tool type identifier, always "file_search" + vector_store_ids: + type: array + items: + type: string + description: >- + List of vector store identifiers to search within + filters: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional filters to apply to the search + max_num_results: + type: integer + default: 10 + description: >- + (Optional) Maximum number of search results to return (1-50) + ranking_options: + type: object + properties: + ranker: + type: string + description: >- + (Optional) Name of the ranking algorithm to use + score_threshold: + type: number + default: 0.0 + description: >- + (Optional) Minimum relevance score threshold for results + additionalProperties: false + description: >- + (Optional) Options for ranking and scoring search results + additionalProperties: false + required: + - type + - vector_store_ids + title: OpenAIResponseInputToolFileSearch + description: >- + File search tool configuration for OpenAI response inputs. 
+ OpenAIResponseInputToolFunction: + type: object + properties: + type: + type: string + const: function + default: function + description: Tool type identifier, always "function" + name: + type: string + description: Name of the function that can be called + description: + type: string + description: >- + (Optional) Description of what the function does + parameters: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) JSON schema defining the function's parameters + strict: + type: boolean + description: >- + (Optional) Whether to enforce strict parameter validation + additionalProperties: false + required: + - type + - name + title: OpenAIResponseInputToolFunction + description: >- + Function tool configuration for OpenAI response inputs. + OpenAIResponseInputToolMCP: + type: object + properties: + type: + type: string + const: mcp + default: mcp + description: Tool type identifier, always "mcp" + server_label: + type: string + description: Label to identify this MCP server + server_url: + type: string + description: URL endpoint of the MCP server + headers: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) HTTP headers to include when connecting to the server + require_approval: + oneOf: + - type: string + const: always + - type: string + const: never + - type: object + properties: + always: + type: array + items: + type: string + description: >- + (Optional) List of tool names that always require approval + never: + type: array + items: + type: string + description: >- + (Optional) List of tool names that never require approval + additionalProperties: false + title: ApprovalFilter + description: >- + Filter configuration for MCP tool approval requirements. 
+ default: never + description: >- + Approval requirement for tool calls ("always", "never", or filter) + allowed_tools: + oneOf: + - type: array + items: + type: string + - type: object + properties: + tool_names: + type: array + items: + type: string + description: >- + (Optional) List of specific tool names that are allowed + additionalProperties: false + title: AllowedToolsFilter + description: >- + Filter configuration for restricting which MCP tools can be used. + description: >- + (Optional) Restriction on which tools can be used from this server + additionalProperties: false + required: + - type + - server_label + - server_url + - require_approval + title: OpenAIResponseInputToolMCP + description: >- + Model Context Protocol (MCP) tool configuration for OpenAI response inputs. + OpenAIResponseInputToolWebSearch: + type: object + properties: + type: + oneOf: + - type: string + const: web_search + - type: string + const: web_search_preview + - type: string + const: web_search_preview_2025_03_11 + default: web_search + description: Web search tool type variant to use + search_context_size: + type: string + default: medium + description: >- + (Optional) Size of search context, must be "low", "medium", or "high" + additionalProperties: false + required: + - type + title: OpenAIResponseInputToolWebSearch + description: >- + Web search tool configuration for OpenAI response inputs. + CreateOpenaiResponseRequest: + type: object + properties: + input: + oneOf: + - type: string + - type: array + items: + $ref: '#/components/schemas/OpenAIResponseInput' + description: Input message(s) to create the response. + model: + type: string + description: The underlying LLM used for completions. + instructions: + type: string + previous_response_id: + type: string + description: >- + (Optional) if specified, the new response will be a continuation of the + previous response. This can be used to easily fork-off new responses from + existing responses. 
+ store: + type: boolean + stream: + type: boolean + temperature: + type: number + text: + $ref: '#/components/schemas/OpenAIResponseText' + tools: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseInputTool' + include: + type: array + items: + type: string + description: >- + (Optional) Additional fields to include in the response. + max_infer_iters: + type: integer + additionalProperties: false + required: + - input + - model + title: CreateOpenaiResponseRequest + OpenAIResponseObject: + type: object + properties: + created_at: + type: integer + description: >- + Unix timestamp when the response was created + error: + $ref: '#/components/schemas/OpenAIResponseError' + description: >- + (Optional) Error details if the response generation failed + id: + type: string + description: Unique identifier for this response + model: + type: string + description: Model identifier used for generation + object: + type: string + const: response + default: response + description: >- + Object type identifier, always "response" + output: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseOutput' + description: >- + List of generated output items (messages, tool calls, etc.) 
+ parallel_tool_calls: + type: boolean + default: false + description: >- + Whether tool calls can be executed in parallel + previous_response_id: + type: string + description: >- + (Optional) ID of the previous response in a conversation + status: + type: string + description: >- + Current status of the response generation + temperature: + type: number + description: >- + (Optional) Sampling temperature used for generation + text: + $ref: '#/components/schemas/OpenAIResponseText' + description: >- + Text formatting configuration for the response + top_p: + type: number + description: >- + (Optional) Nucleus sampling parameter used for generation + truncation: + type: string + description: >- + (Optional) Truncation strategy applied to the response + additionalProperties: false + required: + - created_at + - id + - model + - object + - output + - parallel_tool_calls + - status + - text + title: OpenAIResponseObject + description: >- + Complete OpenAI response object containing generation results and metadata. 
+ OpenAIResponseContentPartOutputText: + type: object + properties: + type: + type: string + const: output_text + default: output_text + text: + type: string + additionalProperties: false + required: + - type + - text + title: OpenAIResponseContentPartOutputText + OpenAIResponseContentPartRefusal: + type: object + properties: + type: + type: string + const: refusal + default: refusal + refusal: + type: string + additionalProperties: false + required: + - type + - refusal + title: OpenAIResponseContentPartRefusal + OpenAIResponseObjectStream: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseCreated' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemAdded' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDelta' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallInProgress' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallSearching' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallCompleted' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsInProgress' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsFailed' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsCompleted' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallInProgress' + - $ref: 
'#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallFailed' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallCompleted' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartAdded' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartDone' + - $ref: '#/components/schemas/OpenAIResponseObjectStreamResponseCompleted' + discriminator: + propertyName: type + mapping: + response.created: '#/components/schemas/OpenAIResponseObjectStreamResponseCreated' + response.output_item.added: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemAdded' + response.output_item.done: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputItemDone' + response.output_text.delta: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDelta' + response.output_text.done: '#/components/schemas/OpenAIResponseObjectStreamResponseOutputTextDone' + response.function_call_arguments.delta: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta' + response.function_call_arguments.done: '#/components/schemas/OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone' + response.web_search_call.in_progress: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallInProgress' + response.web_search_call.searching: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallSearching' + response.web_search_call.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseWebSearchCallCompleted' + response.mcp_list_tools.in_progress: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsInProgress' + response.mcp_list_tools.failed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsFailed' + response.mcp_list_tools.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpListToolsCompleted' + response.mcp_call.arguments.delta: 
'#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta' + response.mcp_call.arguments.done: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallArgumentsDone' + response.mcp_call.in_progress: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallInProgress' + response.mcp_call.failed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallFailed' + response.mcp_call.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseMcpCallCompleted' + response.content_part.added: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartAdded' + response.content_part.done: '#/components/schemas/OpenAIResponseObjectStreamResponseContentPartDone' + response.completed: '#/components/schemas/OpenAIResponseObjectStreamResponseCompleted' + "OpenAIResponseObjectStreamResponseCompleted": + type: object + properties: + response: + $ref: '#/components/schemas/OpenAIResponseObject' + description: The completed response object + type: + type: string + const: response.completed + default: response.completed + description: >- + Event type identifier, always "response.completed" + additionalProperties: false + required: + - response + - type + title: >- + OpenAIResponseObjectStreamResponseCompleted + description: >- + Streaming event indicating a response has been completed. 
+ "OpenAIResponseObjectStreamResponseContentPartAdded": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this content + item_id: + type: string + description: >- + Unique identifier of the output item containing this content part + part: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseContentPartOutputText' + - $ref: '#/components/schemas/OpenAIResponseContentPartRefusal' + discriminator: + propertyName: type + mapping: + output_text: '#/components/schemas/OpenAIResponseContentPartOutputText' + refusal: '#/components/schemas/OpenAIResponseContentPartRefusal' + description: The content part that was added + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.content_part.added + default: response.content_part.added + description: >- + Event type identifier, always "response.content_part.added" + additionalProperties: false + required: + - response_id + - item_id + - part + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseContentPartAdded + description: >- + Streaming event for when a new content part is added to a response item. 
+ "OpenAIResponseObjectStreamResponseContentPartDone": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this content + item_id: + type: string + description: >- + Unique identifier of the output item containing this content part + part: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseContentPartOutputText' + - $ref: '#/components/schemas/OpenAIResponseContentPartRefusal' + discriminator: + propertyName: type + mapping: + output_text: '#/components/schemas/OpenAIResponseContentPartOutputText' + refusal: '#/components/schemas/OpenAIResponseContentPartRefusal' + description: The completed content part + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.content_part.done + default: response.content_part.done + description: >- + Event type identifier, always "response.content_part.done" + additionalProperties: false + required: + - response_id + - item_id + - part + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseContentPartDone + description: >- + Streaming event for when a content part is completed. + "OpenAIResponseObjectStreamResponseCreated": + type: object + properties: + response: + $ref: '#/components/schemas/OpenAIResponseObject' + description: The newly created response object + type: + type: string + const: response.created + default: response.created + description: >- + Event type identifier, always "response.created" + additionalProperties: false + required: + - response + - type + title: >- + OpenAIResponseObjectStreamResponseCreated + description: >- + Streaming event indicating a new response has been created. 
+ "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta": + type: object + properties: + delta: + type: string + description: >- + Incremental function call arguments being added + item_id: + type: string + description: >- + Unique identifier of the function call being updated + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.function_call_arguments.delta + default: response.function_call_arguments.delta + description: >- + Event type identifier, always "response.function_call_arguments.delta" + additionalProperties: false + required: + - delta + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseFunctionCallArgumentsDelta + description: >- + Streaming event for incremental function call argument updates. + "OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone": + type: object + properties: + arguments: + type: string + description: >- + Final complete arguments JSON string for the function call + item_id: + type: string + description: >- + Unique identifier of the completed function call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.function_call_arguments.done + default: response.function_call_arguments.done + description: >- + Event type identifier, always "response.function_call_arguments.done" + additionalProperties: false + required: + - arguments + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseFunctionCallArgumentsDone + description: >- + Streaming event for when function call arguments are completed. 
+ "OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta": + type: object + properties: + delta: + type: string + item_id: + type: string + output_index: + type: integer + sequence_number: + type: integer + type: + type: string + const: response.mcp_call.arguments.delta + default: response.mcp_call.arguments.delta + additionalProperties: false + required: + - delta + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallArgumentsDelta + "OpenAIResponseObjectStreamResponseMcpCallArgumentsDone": + type: object + properties: + arguments: + type: string + item_id: + type: string + output_index: + type: integer + sequence_number: + type: integer + type: + type: string + const: response.mcp_call.arguments.done + default: response.mcp_call.arguments.done + additionalProperties: false + required: + - arguments + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallArgumentsDone + "OpenAIResponseObjectStreamResponseMcpCallCompleted": + type: object + properties: + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.mcp_call.completed + default: response.mcp_call.completed + description: >- + Event type identifier, always "response.mcp_call.completed" + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallCompleted + description: Streaming event for completed MCP calls. 
+ "OpenAIResponseObjectStreamResponseMcpCallFailed": + type: object + properties: + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.mcp_call.failed + default: response.mcp_call.failed + description: >- + Event type identifier, always "response.mcp_call.failed" + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallFailed + description: Streaming event for failed MCP calls. + "OpenAIResponseObjectStreamResponseMcpCallInProgress": + type: object + properties: + item_id: + type: string + description: Unique identifier of the MCP call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.mcp_call.in_progress + default: response.mcp_call.in_progress + description: >- + Event type identifier, always "response.mcp_call.in_progress" + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpCallInProgress + description: >- + Streaming event for MCP calls in progress. 
+ "OpenAIResponseObjectStreamResponseMcpListToolsCompleted": + type: object + properties: + sequence_number: + type: integer + type: + type: string + const: response.mcp_list_tools.completed + default: response.mcp_list_tools.completed + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpListToolsCompleted + "OpenAIResponseObjectStreamResponseMcpListToolsFailed": + type: object + properties: + sequence_number: + type: integer + type: + type: string + const: response.mcp_list_tools.failed + default: response.mcp_list_tools.failed + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpListToolsFailed + "OpenAIResponseObjectStreamResponseMcpListToolsInProgress": + type: object + properties: + sequence_number: + type: integer + type: + type: string + const: response.mcp_list_tools.in_progress + default: response.mcp_list_tools.in_progress + additionalProperties: false + required: + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseMcpListToolsInProgress + "OpenAIResponseObjectStreamResponseOutputItemAdded": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this output + item: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseMessage' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + discriminator: + propertyName: type + mapping: + message: '#/components/schemas/OpenAIResponseMessage' + web_search_call: 
'#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + description: >- + The output item that was added (message, tool call, etc.) + output_index: + type: integer + description: >- + Index position of this item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_item.added + default: response.output_item.added + description: >- + Event type identifier, always "response.output_item.added" + additionalProperties: false + required: + - response_id + - item + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputItemAdded + description: >- + Streaming event for when a new output item is added to the response. 
+ "OpenAIResponseObjectStreamResponseOutputItemDone": + type: object + properties: + response_id: + type: string + description: >- + Unique identifier of the response containing this output + item: + oneOf: + - $ref: '#/components/schemas/OpenAIResponseMessage' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + - $ref: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + - $ref: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + discriminator: + propertyName: type + mapping: + message: '#/components/schemas/OpenAIResponseMessage' + web_search_call: '#/components/schemas/OpenAIResponseOutputMessageWebSearchToolCall' + file_search_call: '#/components/schemas/OpenAIResponseOutputMessageFileSearchToolCall' + function_call: '#/components/schemas/OpenAIResponseOutputMessageFunctionToolCall' + mcp_call: '#/components/schemas/OpenAIResponseOutputMessageMCPCall' + mcp_list_tools: '#/components/schemas/OpenAIResponseOutputMessageMCPListTools' + mcp_approval_request: '#/components/schemas/OpenAIResponseMCPApprovalRequest' + description: >- + The completed output item (message, tool call, etc.) + output_index: + type: integer + description: >- + Index position of this item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_item.done + default: response.output_item.done + description: >- + Event type identifier, always "response.output_item.done" + additionalProperties: false + required: + - response_id + - item + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputItemDone + description: >- + Streaming event for when an output item is completed. 
+ "OpenAIResponseObjectStreamResponseOutputTextDelta": + type: object + properties: + content_index: + type: integer + description: Index position within the text content + delta: + type: string + description: Incremental text content being added + item_id: + type: string + description: >- + Unique identifier of the output item being updated + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_text.delta + default: response.output_text.delta + description: >- + Event type identifier, always "response.output_text.delta" + additionalProperties: false + required: + - content_index + - delta + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputTextDelta + description: >- + Streaming event for incremental text content updates. + "OpenAIResponseObjectStreamResponseOutputTextDone": + type: object + properties: + content_index: + type: integer + description: Index position within the text content + text: + type: string + description: >- + Final complete text content of the output item + item_id: + type: string + description: >- + Unique identifier of the completed output item + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.output_text.done + default: response.output_text.done + description: >- + Event type identifier, always "response.output_text.done" + additionalProperties: false + required: + - content_index + - text + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseOutputTextDone + description: >- + Streaming event for when text output is completed. 
+ "OpenAIResponseObjectStreamResponseWebSearchCallCompleted": + type: object + properties: + item_id: + type: string + description: >- + Unique identifier of the completed web search call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.web_search_call.completed + default: response.web_search_call.completed + description: >- + Event type identifier, always "response.web_search_call.completed" + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseWebSearchCallCompleted + description: >- + Streaming event for completed web search calls. + "OpenAIResponseObjectStreamResponseWebSearchCallInProgress": + type: object + properties: + item_id: + type: string + description: Unique identifier of the web search call + output_index: + type: integer + description: >- + Index position of the item in the output list + sequence_number: + type: integer + description: >- + Sequential number for ordering streaming events + type: + type: string + const: response.web_search_call.in_progress + default: response.web_search_call.in_progress + description: >- + Event type identifier, always "response.web_search_call.in_progress" + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseWebSearchCallInProgress + description: >- + Streaming event for web search calls in progress. 
+ "OpenAIResponseObjectStreamResponseWebSearchCallSearching": + type: object + properties: + item_id: + type: string + output_index: + type: integer + sequence_number: + type: integer + type: + type: string + const: response.web_search_call.searching + default: response.web_search_call.searching + additionalProperties: false + required: + - item_id + - output_index + - sequence_number + - type + title: >- + OpenAIResponseObjectStreamResponseWebSearchCallSearching + ListOpenaiResponsesRequest: + type: object + properties: + after: + type: string + description: The ID of the last response to return. + limit: + type: integer + description: The number of responses to return. + model: + type: string + description: The model to filter responses by. + order: + type: string + enum: + - asc + - desc + description: >- + The order to sort responses by when sorted by created_at ('asc' or 'desc'). + additionalProperties: false + title: ListOpenaiResponsesRequest + OpenAIDeleteResponseObject: + type: object + properties: + id: + type: string + description: >- + Unique identifier of the deleted response + object: + type: string + const: response + default: response + description: >- + Object type identifier, always "response" + deleted: + type: boolean + default: true + description: Deletion confirmation flag, always True + additionalProperties: false + required: + - id + - object + - deleted + title: OpenAIDeleteResponseObject + description: >- + Response object confirming deletion of an OpenAI response. + ListOpenAIResponseInputItem: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/OpenAIResponseInput' + description: List of input items + object: + type: string + const: list + default: list + description: Object type identifier, always "list" + additionalProperties: false + required: + - data + - object + title: ListOpenAIResponseInputItem + description: >- + List container for OpenAI response input items. 
+ CompletionMessage: + type: object + properties: + role: + type: string + const: assistant + default: assistant + description: >- + Must be "assistant" to identify this as the model's response + content: + $ref: '#/components/schemas/InterleavedContent' + description: The content of the model's response + stop_reason: + type: string + enum: + - end_of_turn + - end_of_message + - out_of_tokens + description: >- + Reason why the model stopped generating. Options are: - `StopReason.end_of_turn`: + The model finished generating the entire response. - `StopReason.end_of_message`: + The model finished generating but generated a partial response -- usually, + a tool call. The user may call the tool and continue the conversation + with the tool's response. - `StopReason.out_of_tokens`: The model ran + out of token budget. + tool_calls: + type: array + items: + $ref: '#/components/schemas/ToolCall' + description: >- + List of tool calls. Each tool call is a ToolCall object. + additionalProperties: false + required: + - role + - content + - stop_reason + title: CompletionMessage + description: >- + A message containing the model's (assistant) response in a chat conversation. + ImageContentItem: + type: object + properties: + type: + type: string + const: image + default: image + description: >- + Discriminator type of the content item. Always "image" + image: + type: object + properties: + url: + $ref: '#/components/schemas/URL' + description: >- + A URL of the image or data URL in the format of data:image/{type};base64,{data}. + Note that URL could have length limits. 
+            data:
+              type: string
+              contentEncoding: base64
+              description: base64 encoded image data as string
+          additionalProperties: false
+          description: >-
+            Image as a base64 encoded string or a URL
+      additionalProperties: false
+      required:
+        - type
+        - image
+      title: ImageContentItem
+      description: An image content item
+    InterleavedContent:
+      oneOf:
+        - type: string
+        - $ref: '#/components/schemas/InterleavedContentItem'
+        - type: array
+          items:
+            $ref: '#/components/schemas/InterleavedContentItem'
+    InterleavedContentItem:
+      oneOf:
+        - $ref: '#/components/schemas/ImageContentItem'
+        - $ref: '#/components/schemas/TextContentItem'
+      discriminator:
+        propertyName: type
+        mapping:
+          image: '#/components/schemas/ImageContentItem'
+          text: '#/components/schemas/TextContentItem'
+    Message:
+      oneOf:
+        - $ref: '#/components/schemas/UserMessage'
+        - $ref: '#/components/schemas/SystemMessage'
+        - $ref: '#/components/schemas/ToolResponseMessage'
+        - $ref: '#/components/schemas/CompletionMessage'
+      discriminator:
+        propertyName: role
+        mapping:
+          user: '#/components/schemas/UserMessage'
+          system: '#/components/schemas/SystemMessage'
+          tool: '#/components/schemas/ToolResponseMessage'
+          assistant: '#/components/schemas/CompletionMessage'
+    SystemMessage:
+      type: object
+      properties:
+        role:
+          type: string
+          const: system
+          default: system
+          description: >-
+            Must be "system" to identify this as a system message
+        content:
+          $ref: '#/components/schemas/InterleavedContent'
+          description: >-
+            The content of the "system prompt". If multiple system messages are provided,
+            they are concatenated. The underlying Llama Stack code may also add other
+            system messages (for example, for formatting tool definitions).
+      additionalProperties: false
+      required:
+        - role
+        - content
+      title: SystemMessage
+      description: >-
+        A system message providing instructions or context to the model.
+ TextContentItem: + type: object + properties: + type: + type: string + const: text + default: text + description: >- + Discriminator type of the content item. Always "text" + text: + type: string + description: Text content + additionalProperties: false + required: + - type + - text + title: TextContentItem + description: A text content item + ToolCall: + type: object + properties: + call_id: + type: string + tool_name: + oneOf: + - type: string + enum: + - brave_search + - wolfram_alpha + - photogen + - code_interpreter + title: BuiltinTool + - type: string + arguments: + oneOf: + - type: string + - type: object + additionalProperties: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + - type: 'null' + - type: array + items: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + - type: 'null' + - type: object + additionalProperties: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + - type: 'null' + arguments_json: + type: string + additionalProperties: false + required: + - call_id + - tool_name + - arguments + title: ToolCall + ToolResponseMessage: + type: object + properties: + role: + type: string + const: tool + default: tool + description: >- + Must be "tool" to identify this as a tool response + call_id: + type: string + description: >- + Unique identifier for the tool call this response is for + content: + $ref: '#/components/schemas/InterleavedContent' + description: The response content from the tool + additionalProperties: false + required: + - role + - call_id + - content + title: ToolResponseMessage + description: >- + A message representing the result of a tool invocation. + URL: + type: object + properties: + uri: + type: string + description: The URL string pointing to the resource + additionalProperties: false + required: + - uri + title: URL + description: A URL reference to external content. 
+ UserMessage: + type: object + properties: + role: + type: string + const: user + default: user + description: >- + Must be "user" to identify this as a user message + content: + $ref: '#/components/schemas/InterleavedContent' + description: >- + The content of the message, which can include text and other media + context: + $ref: '#/components/schemas/InterleavedContent' + description: >- + (Optional) This field is used internally by Llama Stack to pass RAG context. + This field may be removed in the API in the future. + additionalProperties: false + required: + - role + - content + title: UserMessage + description: >- + A message from the user in a chat conversation. + RunShieldRequest: + type: object + properties: + shield_id: + type: string + description: The identifier of the shield to run. + messages: + type: array + items: + $ref: '#/components/schemas/Message' + description: The messages to run the shield on. + params: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The parameters of the shield. + additionalProperties: false + required: + - shield_id + - messages + - params + title: RunShieldRequest + RunShieldResponse: + type: object + properties: + violation: + $ref: '#/components/schemas/SafetyViolation' + description: >- + (Optional) Safety violation detected by the shield, if any + additionalProperties: false + title: RunShieldResponse + description: Response from running a safety shield. 
+ SafetyViolation: + type: object + properties: + violation_level: + $ref: '#/components/schemas/ViolationLevel' + description: Severity level of the violation + user_message: + type: string + description: >- + (Optional) Message to convey to the user about the violation + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Additional metadata including specific violation codes for debugging and + telemetry + additionalProperties: false + required: + - violation_level + - metadata + title: SafetyViolation + description: >- + Details of a safety violation detected by content moderation. + ViolationLevel: + type: string + enum: + - info + - warn + - error + title: ViolationLevel + description: Severity level of a safety violation. + AgentTurnInputType: + type: object + properties: + type: + type: string + const: agent_turn_input + default: agent_turn_input + description: >- + Discriminator type. Always "agent_turn_input" + additionalProperties: false + required: + - type + title: AgentTurnInputType + description: Parameter type for agent turn input. + AggregationFunctionType: + type: string + enum: + - average + - weighted_average + - median + - categorical_count + - accuracy + title: AggregationFunctionType + description: >- + Types of aggregation functions for scoring results. + ArrayType: + type: object + properties: + type: + type: string + const: array + default: array + description: Discriminator type. Always "array" + additionalProperties: false + required: + - type + title: ArrayType + description: Parameter type for array values. 
+ BasicScoringFnParams: + type: object + properties: + type: + $ref: '#/components/schemas/ScoringFnParamsType' + const: basic + default: basic + description: >- + The type of scoring function parameters, always basic + aggregation_functions: + type: array + items: + $ref: '#/components/schemas/AggregationFunctionType' + description: >- + Aggregation functions to apply to the scores of each row + additionalProperties: false + required: + - type + - aggregation_functions + title: BasicScoringFnParams + description: >- + Parameters for basic scoring function configuration. + BooleanType: + type: object + properties: + type: + type: string + const: boolean + default: boolean + description: Discriminator type. Always "boolean" + additionalProperties: false + required: + - type + title: BooleanType + description: Parameter type for boolean values. + ChatCompletionInputType: + type: object + properties: + type: + type: string + const: chat_completion_input + default: chat_completion_input + description: >- + Discriminator type. Always "chat_completion_input" + additionalProperties: false + required: + - type + title: ChatCompletionInputType + description: >- + Parameter type for chat completion input. + CompletionInputType: + type: object + properties: + type: + type: string + const: completion_input + default: completion_input + description: >- + Discriminator type. Always "completion_input" + additionalProperties: false + required: + - type + title: CompletionInputType + description: Parameter type for completion input. + JsonType: + type: object + properties: + type: + type: string + const: json + default: json + description: Discriminator type. Always "json" + additionalProperties: false + required: + - type + title: JsonType + description: Parameter type for JSON values. 
+ LLMAsJudgeScoringFnParams: + type: object + properties: + type: + $ref: '#/components/schemas/ScoringFnParamsType' + const: llm_as_judge + default: llm_as_judge + description: >- + The type of scoring function parameters, always llm_as_judge + judge_model: + type: string + description: >- + Identifier of the LLM model to use as a judge for scoring + prompt_template: + type: string + description: >- + (Optional) Custom prompt template for the judge model + judge_score_regexes: + type: array + items: + type: string + description: >- + Regexes to extract the answer from generated response + aggregation_functions: + type: array + items: + $ref: '#/components/schemas/AggregationFunctionType' + description: >- + Aggregation functions to apply to the scores of each row + additionalProperties: false + required: + - type + - judge_model + - judge_score_regexes + - aggregation_functions + title: LLMAsJudgeScoringFnParams + description: >- + Parameters for LLM-as-judge scoring function configuration. + NumberType: + type: object + properties: + type: + type: string + const: number + default: number + description: Discriminator type. Always "number" + additionalProperties: false + required: + - type + title: NumberType + description: Parameter type for numeric values. + ObjectType: + type: object + properties: + type: + type: string + const: object + default: object + description: Discriminator type. Always "object" + additionalProperties: false + required: + - type + title: ObjectType + description: Parameter type for object values. 
+ RegexParserScoringFnParams: + type: object + properties: + type: + $ref: '#/components/schemas/ScoringFnParamsType' + const: regex_parser + default: regex_parser + description: >- + The type of scoring function parameters, always regex_parser + parsing_regexes: + type: array + items: + type: string + description: >- + Regex to extract the answer from generated response + aggregation_functions: + type: array + items: + $ref: '#/components/schemas/AggregationFunctionType' + description: >- + Aggregation functions to apply to the scores of each row + additionalProperties: false + required: + - type + - parsing_regexes + - aggregation_functions + title: RegexParserScoringFnParams + description: >- + Parameters for regex parser scoring function configuration. + ScoringFn: + type: object + properties: + identifier: + type: string + provider_resource_id: + type: string + provider_id: + type: string + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: scoring_function + default: scoring_function + description: >- + The resource type, always scoring_function + description: + type: string + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + return_type: + oneOf: + - $ref: '#/components/schemas/StringType' + - $ref: '#/components/schemas/NumberType' + - $ref: '#/components/schemas/BooleanType' + - $ref: '#/components/schemas/ArrayType' + - $ref: '#/components/schemas/ObjectType' + - $ref: '#/components/schemas/JsonType' + - $ref: '#/components/schemas/UnionType' + - $ref: '#/components/schemas/ChatCompletionInputType' + - $ref: '#/components/schemas/CompletionInputType' + - $ref: '#/components/schemas/AgentTurnInputType' + discriminator: + propertyName: type + mapping: + string: '#/components/schemas/StringType' + number: '#/components/schemas/NumberType' + 
boolean: '#/components/schemas/BooleanType' + array: '#/components/schemas/ArrayType' + object: '#/components/schemas/ObjectType' + json: '#/components/schemas/JsonType' + union: '#/components/schemas/UnionType' + chat_completion_input: '#/components/schemas/ChatCompletionInputType' + completion_input: '#/components/schemas/CompletionInputType' + agent_turn_input: '#/components/schemas/AgentTurnInputType' + params: + $ref: '#/components/schemas/ScoringFnParams' + additionalProperties: false + required: + - identifier + - provider_id + - type + - metadata + - return_type + title: ScoringFn + description: >- + A scoring function resource for evaluating model outputs. + ScoringFnParams: + oneOf: + - $ref: '#/components/schemas/LLMAsJudgeScoringFnParams' + - $ref: '#/components/schemas/RegexParserScoringFnParams' + - $ref: '#/components/schemas/BasicScoringFnParams' + discriminator: + propertyName: type + mapping: + llm_as_judge: '#/components/schemas/LLMAsJudgeScoringFnParams' + regex_parser: '#/components/schemas/RegexParserScoringFnParams' + basic: '#/components/schemas/BasicScoringFnParams' + ScoringFnParamsType: + type: string + enum: + - llm_as_judge + - regex_parser + - basic + title: ScoringFnParamsType + description: >- + Types of scoring function parameter configurations. + StringType: + type: object + properties: + type: + type: string + const: string + default: string + description: Discriminator type. Always "string" + additionalProperties: false + required: + - type + title: StringType + description: Parameter type for string values. + UnionType: + type: object + properties: + type: + type: string + const: union + default: union + description: Discriminator type. Always "union" + additionalProperties: false + required: + - type + title: UnionType + description: Parameter type for union values. 
+ ListScoringFunctionsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/ScoringFn' + additionalProperties: false + required: + - data + title: ListScoringFunctionsResponse + ParamType: + oneOf: + - $ref: '#/components/schemas/StringType' + - $ref: '#/components/schemas/NumberType' + - $ref: '#/components/schemas/BooleanType' + - $ref: '#/components/schemas/ArrayType' + - $ref: '#/components/schemas/ObjectType' + - $ref: '#/components/schemas/JsonType' + - $ref: '#/components/schemas/UnionType' + - $ref: '#/components/schemas/ChatCompletionInputType' + - $ref: '#/components/schemas/CompletionInputType' + - $ref: '#/components/schemas/AgentTurnInputType' + discriminator: + propertyName: type + mapping: + string: '#/components/schemas/StringType' + number: '#/components/schemas/NumberType' + boolean: '#/components/schemas/BooleanType' + array: '#/components/schemas/ArrayType' + object: '#/components/schemas/ObjectType' + json: '#/components/schemas/JsonType' + union: '#/components/schemas/UnionType' + chat_completion_input: '#/components/schemas/ChatCompletionInputType' + completion_input: '#/components/schemas/CompletionInputType' + agent_turn_input: '#/components/schemas/AgentTurnInputType' + RegisterScoringFunctionRequest: + type: object + properties: + scoring_fn_id: + type: string + description: >- + The ID of the scoring function to register. + description: + type: string + description: The description of the scoring function. + return_type: + $ref: '#/components/schemas/ParamType' + description: The return type of the scoring function. + provider_scoring_fn_id: + type: string + description: >- + The ID of the provider scoring function to use for the scoring function. + provider_id: + type: string + description: >- + The ID of the provider to use for the scoring function. 
+ params: + $ref: '#/components/schemas/ScoringFnParams' + description: >- + The parameters for the scoring function for benchmark eval, these can + be overridden for app eval. + additionalProperties: false + required: + - scoring_fn_id + - description + - return_type + title: RegisterScoringFunctionRequest + ScoreRequest: + type: object + properties: + input_rows: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The rows to score. + scoring_functions: + type: object + additionalProperties: + oneOf: + - $ref: '#/components/schemas/ScoringFnParams' + - type: 'null' + description: >- + The scoring functions to use for the scoring. + additionalProperties: false + required: + - input_rows + - scoring_functions + title: ScoreRequest + ScoreResponse: + type: object + properties: + results: + type: object + additionalProperties: + $ref: '#/components/schemas/ScoringResult' + description: >- + A map of scoring function name to ScoringResult. + additionalProperties: false + required: + - results + title: ScoreResponse + description: The response from scoring. + ScoringResult: + type: object + properties: + score_rows: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The scoring result for each row. Each row is a map of column name to value. + aggregated_results: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: Map of metric name to aggregated value + additionalProperties: false + required: + - score_rows + - aggregated_results + title: ScoringResult + description: A scoring result for a single row. 
+ ScoreBatchRequest: + type: object + properties: + dataset_id: + type: string + description: The ID of the dataset to score. + scoring_functions: + type: object + additionalProperties: + oneOf: + - $ref: '#/components/schemas/ScoringFnParams' + - type: 'null' + description: >- + The scoring functions to use for the scoring. + save_results_dataset: + type: boolean + description: >- + Whether to save the results to a dataset. + additionalProperties: false + required: + - dataset_id + - scoring_functions + - save_results_dataset + title: ScoreBatchRequest + ScoreBatchResponse: + type: object + properties: + dataset_id: + type: string + description: >- + (Optional) The identifier of the dataset that was scored + results: + type: object + additionalProperties: + $ref: '#/components/schemas/ScoringResult' + description: >- + A map of scoring function name to ScoringResult + additionalProperties: false + required: + - results + title: ScoreBatchResponse + description: >- + Response from batch scoring operations on datasets. + Shield: + type: object + properties: + identifier: + type: string + provider_resource_id: + type: string + provider_id: + type: string + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: shield + default: shield + description: The resource type, always shield + params: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Configuration parameters for the shield + additionalProperties: false + required: + - identifier + - provider_id + - type + title: Shield + description: >- + A safety shield resource that can be used to check content. 
+ ListShieldsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/Shield' + additionalProperties: false + required: + - data + title: ListShieldsResponse + RegisterShieldRequest: + type: object + properties: + shield_id: + type: string + description: >- + The identifier of the shield to register. + provider_shield_id: + type: string + description: >- + The identifier of the shield in the provider. + provider_id: + type: string + description: The identifier of the provider. + params: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The parameters of the shield. + additionalProperties: false + required: + - shield_id + title: RegisterShieldRequest + SyntheticDataGenerateRequest: + type: object + properties: + dialogs: + type: array + items: + $ref: '#/components/schemas/Message' + description: >- + List of conversation messages to use as input for synthetic data generation + filtering_function: + type: string + enum: + - none + - random + - top_k + - top_p + - top_k_top_p + - sigmoid + description: >- + Type of filtering to apply to generated synthetic data samples + model: + type: string + description: >- + (Optional) The identifier of the model to use. 
The model must be registered + with Llama Stack and available via the /models endpoint + additionalProperties: false + required: + - dialogs + - filtering_function + title: SyntheticDataGenerateRequest + SyntheticDataGenerationResponse: + type: object + properties: + synthetic_data: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + List of generated synthetic data samples that passed the filtering criteria + statistics: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Statistical information about the generation process and filtering + results + additionalProperties: false + required: + - synthetic_data + title: SyntheticDataGenerationResponse + description: >- + Response from the synthetic data generation. Batch of (prompt, response, score) + tuples that pass the threshold. + Event: + oneOf: + - $ref: '#/components/schemas/UnstructuredLogEvent' + - $ref: '#/components/schemas/MetricEvent' + - $ref: '#/components/schemas/StructuredLogEvent' + discriminator: + propertyName: type + mapping: + unstructured_log: '#/components/schemas/UnstructuredLogEvent' + metric: '#/components/schemas/MetricEvent' + structured_log: '#/components/schemas/StructuredLogEvent' + EventType: + type: string + enum: + - unstructured_log + - structured_log + - metric + title: EventType + description: >- + The type of telemetry event being logged. + LogSeverity: + type: string + enum: + - verbose + - debug + - info + - warn + - error + - critical + title: LogSeverity + description: The severity level of a log message. 
+ MetricEvent: + type: object + properties: + trace_id: + type: string + description: >- + Unique identifier for the trace this event belongs to + span_id: + type: string + description: >- + Unique identifier for the span this event belongs to + timestamp: + type: string + format: date-time + description: Timestamp when the event occurred + attributes: + type: object + additionalProperties: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + - type: 'null' + description: >- + (Optional) Key-value pairs containing additional metadata about the event + type: + $ref: '#/components/schemas/EventType' + const: metric + default: metric + description: Event type identifier set to METRIC + metric: + type: string + description: The name of the metric being measured + value: + oneOf: + - type: integer + - type: number + description: >- + The numeric value of the metric measurement + unit: + type: string + description: >- + The unit of measurement for the metric value + additionalProperties: false + required: + - trace_id + - span_id + - timestamp + - type + - metric + - value + - unit + title: MetricEvent + description: >- + A metric event containing a measured value. + SpanEndPayload: + type: object + properties: + type: + $ref: '#/components/schemas/StructuredLogType' + const: span_end + default: span_end + description: Payload type identifier set to SPAN_END + status: + $ref: '#/components/schemas/SpanStatus' + description: >- + The final status of the span indicating success or failure + additionalProperties: false + required: + - type + - status + title: SpanEndPayload + description: Payload for a span end event. 
+ SpanStartPayload: + type: object + properties: + type: + $ref: '#/components/schemas/StructuredLogType' + const: span_start + default: span_start + description: >- + Payload type identifier set to SPAN_START + name: + type: string + description: >- + Human-readable name describing the operation this span represents + parent_span_id: + type: string + description: >- + (Optional) Unique identifier for the parent span, if this is a child span + additionalProperties: false + required: + - type + - name + title: SpanStartPayload + description: Payload for a span start event. + SpanStatus: + type: string + enum: + - ok + - error + title: SpanStatus + description: >- + The status of a span indicating whether it completed successfully or with + an error. + StructuredLogEvent: + type: object + properties: + trace_id: + type: string + description: >- + Unique identifier for the trace this event belongs to + span_id: + type: string + description: >- + Unique identifier for the span this event belongs to + timestamp: + type: string + format: date-time + description: Timestamp when the event occurred + attributes: + type: object + additionalProperties: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + - type: 'null' + description: >- + (Optional) Key-value pairs containing additional metadata about the event + type: + $ref: '#/components/schemas/EventType' + const: structured_log + default: structured_log + description: >- + Event type identifier set to STRUCTURED_LOG + payload: + oneOf: + - $ref: '#/components/schemas/SpanStartPayload' + - $ref: '#/components/schemas/SpanEndPayload' + discriminator: + propertyName: type + mapping: + span_start: '#/components/schemas/SpanStartPayload' + span_end: '#/components/schemas/SpanEndPayload' + description: >- + The structured payload data for the log event + additionalProperties: false + required: + - trace_id + - span_id + - timestamp + - type + - payload + title: StructuredLogEvent + description: >- + 
A structured log event containing typed payload data. + StructuredLogType: + type: string + enum: + - span_start + - span_end + title: StructuredLogType + description: >- + The type of structured log event payload. + UnstructuredLogEvent: + type: object + properties: + trace_id: + type: string + description: >- + Unique identifier for the trace this event belongs to + span_id: + type: string + description: >- + Unique identifier for the span this event belongs to + timestamp: + type: string + format: date-time + description: Timestamp when the event occurred + attributes: + type: object + additionalProperties: + oneOf: + - type: string + - type: integer + - type: number + - type: boolean + - type: 'null' + description: >- + (Optional) Key-value pairs containing additional metadata about the event + type: + $ref: '#/components/schemas/EventType' + const: unstructured_log + default: unstructured_log + description: >- + Event type identifier set to UNSTRUCTURED_LOG + message: + type: string + description: The log message text + severity: + $ref: '#/components/schemas/LogSeverity' + description: The severity level of the log message + additionalProperties: false + required: + - trace_id + - span_id + - timestamp + - type + - message + - severity + title: UnstructuredLogEvent + description: >- + An unstructured log event containing a simple text message. + LogEventRequest: + type: object + properties: + event: + $ref: '#/components/schemas/Event' + description: The event to log. + ttl_seconds: + type: integer + description: The time to live of the event. + additionalProperties: false + required: + - event + - ttl_seconds + title: LogEventRequest + InvokeToolRequest: + type: object + properties: + tool_name: + type: string + description: The name of the tool to invoke. 
+ kwargs: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + A dictionary of arguments to pass to the tool. + additionalProperties: false + required: + - tool_name + - kwargs + title: InvokeToolRequest + ToolInvocationResult: + type: object + properties: + content: + $ref: '#/components/schemas/InterleavedContent' + description: >- + (Optional) The output content from the tool execution + error_message: + type: string + description: >- + (Optional) Error message if the tool execution failed + error_code: + type: integer + description: >- + (Optional) Numeric error code if the tool execution failed + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional metadata about the tool execution + additionalProperties: false + title: ToolInvocationResult + description: Result of a tool invocation. + ToolDef: + type: object + properties: + name: + type: string + description: Name of the tool + description: + type: string + description: >- + (Optional) Human-readable description of what the tool does + parameters: + type: array + items: + $ref: '#/components/schemas/ToolParameter' + description: >- + (Optional) List of parameters this tool accepts + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional metadata about the tool + additionalProperties: false + required: + - name + title: ToolDef + description: >- + Tool definition used in runtime contexts. 
+ ToolParameter: + type: object + properties: + name: + type: string + description: Name of the parameter + parameter_type: + type: string + description: >- + Type of the parameter (e.g., string, integer) + description: + type: string + description: >- + Human-readable description of what the parameter does + required: + type: boolean + default: true + description: >- + Whether this parameter is required for tool invocation + items: + type: object + description: >- + Type of the elements when parameter_type is array + title: + type: string + description: (Optional) Title of the parameter + default: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Default value for the parameter if not provided + additionalProperties: false + required: + - name + - parameter_type + - description + - required + title: ToolParameter + description: Parameter definition for a tool. + ListToolDefsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/ToolDef' + description: List of tool definitions + additionalProperties: false + required: + - data + title: ListToolDefsResponse + description: >- + Response containing a list of tool definitions. + RAGDocument: + type: object + properties: + document_id: + type: string + description: The unique identifier for the document. + content: + oneOf: + - type: string + - $ref: '#/components/schemas/InterleavedContentItem' + - type: array + items: + $ref: '#/components/schemas/InterleavedContentItem' + - $ref: '#/components/schemas/URL' + description: The content of the document. + mime_type: + type: string + description: The MIME type of the document. + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: Additional metadata for the document. 
+ additionalProperties: false + required: + - document_id + - content + - metadata + title: RAGDocument + description: >- + A document to be used for document ingestion in the RAG Tool. + InsertRequest: + type: object + properties: + documents: + type: array + items: + $ref: '#/components/schemas/RAGDocument' + description: >- + List of documents to index in the RAG system + vector_db_id: + type: string + description: >- + ID of the vector database to store the document embeddings + chunk_size_in_tokens: + type: integer + description: >- + (Optional) Size in tokens for document chunking during indexing + additionalProperties: false + required: + - documents + - vector_db_id + - chunk_size_in_tokens + title: InsertRequest + DefaultRAGQueryGeneratorConfig: + type: object + properties: + type: + type: string + const: default + default: default + description: >- + Type of query generator, always 'default' + separator: + type: string + default: ' ' + description: >- + String separator used to join query terms + additionalProperties: false + required: + - type + - separator + title: DefaultRAGQueryGeneratorConfig + description: >- + Configuration for the default RAG query generator. + LLMRAGQueryGeneratorConfig: + type: object + properties: + type: + type: string + const: llm + default: llm + description: Type of query generator, always 'llm' + model: + type: string + description: >- + Name of the language model to use for query generation + template: + type: string + description: >- + Template string for formatting the query generation prompt + additionalProperties: false + required: + - type + - model + - template + title: LLMRAGQueryGeneratorConfig + description: >- + Configuration for the LLM-based RAG query generator. 
+ RAGQueryConfig: + type: object + properties: + query_generator_config: + oneOf: + - $ref: '#/components/schemas/DefaultRAGQueryGeneratorConfig' + - $ref: '#/components/schemas/LLMRAGQueryGeneratorConfig' + discriminator: + propertyName: type + mapping: + default: '#/components/schemas/DefaultRAGQueryGeneratorConfig' + llm: '#/components/schemas/LLMRAGQueryGeneratorConfig' + description: Configuration for the query generator. + max_tokens_in_context: + type: integer + default: 4096 + description: Maximum number of tokens in the context. + max_chunks: + type: integer + default: 5 + description: Maximum number of chunks to retrieve. + chunk_template: + type: string + default: > + Result {index} + + Content: {chunk.content} + + Metadata: {metadata} + description: >- + Template for formatting each retrieved chunk in the context. Available + placeholders: {index} (1-based chunk ordinal), {chunk.content} (chunk + content string), {metadata} (chunk metadata dict). Default: "Result {index}\nContent: + {chunk.content}\nMetadata: {metadata}\n" + mode: + $ref: '#/components/schemas/RAGSearchMode' + default: vector + description: >- + Search mode for retrieval—either "vector", "keyword", or "hybrid". Default + "vector". + ranker: + $ref: '#/components/schemas/Ranker' + description: >- + Configuration for the ranker to use in hybrid search. Defaults to RRF + ranker. + additionalProperties: false + required: + - query_generator_config + - max_tokens_in_context + - max_chunks + - chunk_template + title: RAGQueryConfig + description: >- + Configuration for the RAG query generation. 
+ RAGSearchMode: + type: string + enum: + - vector + - keyword + - hybrid + title: RAGSearchMode + description: >- + Search modes for RAG query retrieval: - VECTOR: Uses vector similarity search + for semantic matching - KEYWORD: Uses keyword-based search for exact matching + - HYBRID: Combines both vector and keyword search for better results + RRFRanker: + type: object + properties: + type: + type: string + const: rrf + default: rrf + description: The type of ranker, always "rrf" + impact_factor: + type: number + default: 60.0 + description: >- + The impact factor for RRF scoring. Higher values give more weight to higher-ranked + results. Must be greater than 0 + additionalProperties: false + required: + - type + - impact_factor + title: RRFRanker + description: >- + Reciprocal Rank Fusion (RRF) ranker configuration. + Ranker: + oneOf: + - $ref: '#/components/schemas/RRFRanker' + - $ref: '#/components/schemas/WeightedRanker' + discriminator: + propertyName: type + mapping: + rrf: '#/components/schemas/RRFRanker' + weighted: '#/components/schemas/WeightedRanker' + WeightedRanker: + type: object + properties: + type: + type: string + const: weighted + default: weighted + description: The type of ranker, always "weighted" + alpha: + type: number + default: 0.5 + description: >- + Weight factor between 0 and 1. 0 means only use keyword scores, 1 means + only use vector scores, values in between blend both scores. + additionalProperties: false + required: + - type + - alpha + title: WeightedRanker + description: >- + Weighted ranker configuration that combines vector and keyword scores. 
+ QueryRequest: + type: object + properties: + content: + $ref: '#/components/schemas/InterleavedContent' + description: >- + The query content to search for in the indexed documents + vector_db_ids: + type: array + items: + type: string + description: >- + List of vector database IDs to search within + query_config: + $ref: '#/components/schemas/RAGQueryConfig' + description: >- + (Optional) Configuration parameters for the query operation + additionalProperties: false + required: + - content + - vector_db_ids + title: QueryRequest + RAGQueryResult: + type: object + properties: + content: + $ref: '#/components/schemas/InterleavedContent' + description: >- + (Optional) The retrieved content from the query + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Additional metadata about the query result + additionalProperties: false + required: + - metadata + title: RAGQueryResult + description: >- + Result of a RAG query containing retrieved content and metadata. 
+ ToolGroup: + type: object + properties: + identifier: + type: string + provider_resource_id: + type: string + provider_id: + type: string + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: tool_group + default: tool_group + description: Type of resource, always 'tool_group' + mcp_endpoint: + $ref: '#/components/schemas/URL' + description: >- + (Optional) Model Context Protocol endpoint for remote tools + args: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional arguments for the tool group + additionalProperties: false + required: + - identifier + - provider_id + - type + title: ToolGroup + description: >- + A group of related tools managed together. + ListToolGroupsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/ToolGroup' + description: List of tool groups + additionalProperties: false + required: + - data + title: ListToolGroupsResponse + description: >- + Response containing a list of tool groups. + RegisterToolGroupRequest: + type: object + properties: + toolgroup_id: + type: string + description: The ID of the tool group to register. + provider_id: + type: string + description: >- + The ID of the provider to use for the tool group. + mcp_endpoint: + $ref: '#/components/schemas/URL' + description: >- + The MCP endpoint to use for the tool group. + args: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + A dictionary of arguments to pass to the tool group. 
+ additionalProperties: false + required: + - toolgroup_id + - provider_id + title: RegisterToolGroupRequest + Tool: + type: object + properties: + identifier: + type: string + provider_resource_id: + type: string + provider_id: + type: string + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: tool + default: tool + description: Type of resource, always 'tool' + toolgroup_id: + type: string + description: >- + ID of the tool group this tool belongs to + description: + type: string + description: >- + Human-readable description of what the tool does + parameters: + type: array + items: + $ref: '#/components/schemas/ToolParameter' + description: List of parameters this tool accepts + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional metadata about the tool + additionalProperties: false + required: + - identifier + - provider_id + - type + - toolgroup_id + - description + - parameters + title: Tool + description: A tool that can be invoked by agents. + ListToolsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/Tool' + description: List of tools + additionalProperties: false + required: + - data + title: ListToolsResponse + description: Response containing a list of tools. 
+ VectorDB: + type: object + properties: + identifier: + type: string + provider_resource_id: + type: string + provider_id: + type: string + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: vector_db + default: vector_db + description: >- + Type of resource, always 'vector_db' for vector databases + embedding_model: + type: string + description: >- + Name of the embedding model to use for vector generation + embedding_dimension: + type: integer + description: Dimension of the embedding vectors + vector_db_name: + type: string + additionalProperties: false + required: + - identifier + - provider_id + - type + - embedding_model + - embedding_dimension + title: VectorDB + description: >- + Vector database resource for storing and querying vector embeddings. + ListVectorDBsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/VectorDB' + description: List of vector databases + additionalProperties: false + required: + - data + title: ListVectorDBsResponse + description: Response from listing vector databases. + RegisterVectorDbRequest: + type: object + properties: + vector_db_id: + type: string + description: >- + The identifier of the vector database to register. + embedding_model: + type: string + description: The embedding model to use. + embedding_dimension: + type: integer + description: The dimension of the embedding model. + provider_id: + type: string + description: The identifier of the provider. + vector_db_name: + type: string + description: The name of the vector database. + provider_vector_db_id: + type: string + description: >- + The identifier of the vector database in the provider. 
+      additionalProperties: false
+      required:
+        - vector_db_id
+        - embedding_model
+      title: RegisterVectorDbRequest
+    Chunk:
+      type: object
+      properties:
+        content:
+          $ref: '#/components/schemas/InterleavedContent'
+          description: >-
+            The content of the chunk, which can be interleaved text, images, or other
+            types.
+        metadata:
+          type: object
+          additionalProperties:
+            oneOf:
+              - type: 'null'
+              - type: boolean
+              - type: number
+              - type: string
+              - type: array
+              - type: object
+          description: >-
+            Metadata associated with the chunk that will be used in the model context
+            during inference.
+        embedding:
+          type: array
+          items:
+            type: number
+          description: >-
+            Optional embedding for the chunk. If not provided, it will be computed
+            later.
+        stored_chunk_id:
+          type: string
+          description: >-
+            The chunk ID that is stored in the vector database. Used for backend functionality.
+        chunk_metadata:
+          $ref: '#/components/schemas/ChunkMetadata'
+          description: >-
+            Metadata for the chunk that will NOT be used in the context during inference.
+            The `chunk_metadata` is required for backend functionality.
+      additionalProperties: false
+      required:
+        - content
+        - metadata
+      title: Chunk
+      description: >-
+        A chunk of content that can be inserted into a vector database.
+    ChunkMetadata:
+      type: object
+      properties:
+        chunk_id:
+          type: string
+          description: >-
+            The ID of the chunk. If not set, it will be generated based on the document
+            ID and content.
+        document_id:
+          type: string
+          description: >-
+            The ID of the document this chunk belongs to.
+        source:
+          type: string
+          description: >-
+            The source of the content, such as a URL, file path, or other identifier.
+        created_timestamp:
+          type: integer
+          description: >-
+            An optional timestamp indicating when the chunk was created.
+        updated_timestamp:
+          type: integer
+          description: >-
+            An optional timestamp indicating when the chunk was last updated.
+        chunk_window:
+          type: string
+          description: >-
+            The window of the chunk, which can be used to group related chunks together.
+        chunk_tokenizer:
+          type: string
+          description: >-
+            The tokenizer used to create the chunk. Default is Tiktoken.
+        chunk_embedding_model:
+          type: string
+          description: >-
+            The embedding model used to create the chunk's embedding.
+        chunk_embedding_dimension:
+          type: integer
+          description: >-
+            The dimension of the embedding vector for the chunk.
+        content_token_count:
+          type: integer
+          description: >-
+            The number of tokens in the content of the chunk.
+        metadata_token_count:
+          type: integer
+          description: >-
+            The number of tokens in the metadata of the chunk.
+      additionalProperties: false
+      title: ChunkMetadata
+      description: >-
+        `ChunkMetadata` is backend metadata for a `Chunk` that is used to store additional
+        information about the chunk that will not be used in the context during
+        inference, but is required for backend functionality. The `ChunkMetadata` is
+        set during chunk creation in `MemoryToolRuntimeImpl().insert()` and is not
+        expected to change after. Use `Chunk.metadata` for metadata that will
+        be used in the context during inference.
+    InsertChunksRequest:
+      type: object
+      properties:
+        vector_db_id:
+          type: string
+          description: >-
+            The identifier of the vector database to insert the chunks into.
+        chunks:
+          type: array
+          items:
+            $ref: '#/components/schemas/Chunk'
+          description: >-
+            The chunks to insert. Each `Chunk` should contain content which can be
+            interleaved text, images, or other types. `metadata`: `dict[str, Any]`
+            and `embedding`: `List[float]` are optional. If `metadata` is provided,
+            you configure how Llama Stack formats the chunk during generation. If
+            `embedding` is not provided, it will be computed later.
+        ttl_seconds:
+          type: integer
+          description: The time to live of the chunks.
+ additionalProperties: false + required: + - vector_db_id + - chunks + title: InsertChunksRequest + QueryChunksRequest: + type: object + properties: + vector_db_id: + type: string + description: >- + The identifier of the vector database to query. + query: + $ref: '#/components/schemas/InterleavedContent' + description: The query to search for. + params: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The parameters of the query. + additionalProperties: false + required: + - vector_db_id + - query + title: QueryChunksRequest + QueryChunksResponse: + type: object + properties: + chunks: + type: array + items: + $ref: '#/components/schemas/Chunk' + description: >- + List of content chunks returned from the query + scores: + type: array + items: + type: number + description: >- + Relevance scores corresponding to each returned chunk + additionalProperties: false + required: + - chunks + - scores + title: QueryChunksResponse + description: >- + Response from querying chunks in a vector database. + VectorStoreFileCounts: + type: object + properties: + completed: + type: integer + description: >- + Number of files that have been successfully processed + cancelled: + type: integer + description: >- + Number of files that had their processing cancelled + failed: + type: integer + description: Number of files that failed to process + in_progress: + type: integer + description: >- + Number of files currently being processed + total: + type: integer + description: >- + Total number of files in the vector store + additionalProperties: false + required: + - completed + - cancelled + - failed + - in_progress + - total + title: VectorStoreFileCounts + description: >- + File processing status counts for a vector store. 
+ VectorStoreListResponse: + type: object + properties: + object: + type: string + default: list + description: Object type identifier, always "list" + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreObject' + description: List of vector store objects + first_id: + type: string + description: >- + (Optional) ID of the first vector store in the list for pagination + last_id: + type: string + description: >- + (Optional) ID of the last vector store in the list for pagination + has_more: + type: boolean + default: false + description: >- + Whether there are more vector stores available beyond this page + additionalProperties: false + required: + - object + - data + - has_more + title: VectorStoreListResponse + description: Response from listing vector stores. + VectorStoreObject: + type: object + properties: + id: + type: string + description: Unique identifier for the vector store + object: + type: string + default: vector_store + description: >- + Object type identifier, always "vector_store" + created_at: + type: integer + description: >- + Timestamp when the vector store was created + name: + type: string + description: (Optional) Name of the vector store + usage_bytes: + type: integer + default: 0 + description: >- + Storage space used by the vector store in bytes + file_counts: + $ref: '#/components/schemas/VectorStoreFileCounts' + description: >- + File processing status counts for the vector store + status: + type: string + default: completed + description: Current status of the vector store + expires_after: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Expiration policy for the vector store + expires_at: + type: integer + description: >- + (Optional) Timestamp when the vector store will expire + last_active_at: + type: integer + description: >- + (Optional) Timestamp of last activity on the vector store + 
metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Set of key-value pairs that can be attached to the vector store + additionalProperties: false + required: + - id + - object + - created_at + - usage_bytes + - file_counts + - status + - metadata + title: VectorStoreObject + description: OpenAI Vector Store object. + OpenaiCreateVectorStoreRequest: + type: object + properties: + name: + type: string + description: A name for the vector store. + file_ids: + type: array + items: + type: string + description: >- + A list of File IDs that the vector store should use. Useful for tools + like `file_search` that can access files. + expires_after: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The expiration policy for a vector store. + chunking_strategy: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The chunking strategy used to chunk the file(s). If not set, will use + the `auto` strategy. + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Set of 16 key-value pairs that can be attached to an object. + embedding_model: + type: string + description: >- + The embedding model to use for this vector store. + embedding_dimension: + type: integer + description: >- + The dimension of the embedding vectors (default: 384). + provider_id: + type: string + description: >- + The ID of the provider to use for this vector store. 
+ additionalProperties: false + title: OpenaiCreateVectorStoreRequest + OpenaiUpdateVectorStoreRequest: + type: object + properties: + name: + type: string + description: The name of the vector store. + expires_after: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The expiration policy for a vector store. + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Set of 16 key-value pairs that can be attached to an object. + additionalProperties: false + title: OpenaiUpdateVectorStoreRequest + VectorStoreDeleteResponse: + type: object + properties: + id: + type: string + description: >- + Unique identifier of the deleted vector store + object: + type: string + default: vector_store.deleted + description: >- + Object type identifier for the deletion response + deleted: + type: boolean + default: true + description: >- + Whether the deletion operation was successful + additionalProperties: false + required: + - id + - object + - deleted + title: VectorStoreDeleteResponse + description: Response from deleting a vector store. + VectorStoreChunkingStrategy: + oneOf: + - $ref: '#/components/schemas/VectorStoreChunkingStrategyAuto' + - $ref: '#/components/schemas/VectorStoreChunkingStrategyStatic' + discriminator: + propertyName: type + mapping: + auto: '#/components/schemas/VectorStoreChunkingStrategyAuto' + static: '#/components/schemas/VectorStoreChunkingStrategyStatic' + VectorStoreChunkingStrategyAuto: + type: object + properties: + type: + type: string + const: auto + default: auto + description: >- + Strategy type, always "auto" for automatic chunking + additionalProperties: false + required: + - type + title: VectorStoreChunkingStrategyAuto + description: >- + Automatic chunking strategy for vector store files. 
+ VectorStoreChunkingStrategyStatic: + type: object + properties: + type: + type: string + const: static + default: static + description: >- + Strategy type, always "static" for static chunking + static: + $ref: '#/components/schemas/VectorStoreChunkingStrategyStaticConfig' + description: >- + Configuration parameters for the static chunking strategy + additionalProperties: false + required: + - type + - static + title: VectorStoreChunkingStrategyStatic + description: >- + Static chunking strategy with configurable parameters. + VectorStoreChunkingStrategyStaticConfig: + type: object + properties: + chunk_overlap_tokens: + type: integer + default: 400 + description: >- + Number of tokens to overlap between adjacent chunks + max_chunk_size_tokens: + type: integer + default: 800 + description: >- + Maximum number of tokens per chunk, must be between 100 and 4096 + additionalProperties: false + required: + - chunk_overlap_tokens + - max_chunk_size_tokens + title: VectorStoreChunkingStrategyStaticConfig + description: >- + Configuration for static chunking strategy. + OpenaiCreateVectorStoreFileBatchRequest: + type: object + properties: + file_ids: + type: array + items: + type: string + description: >- + A list of File IDs that the vector store should use. + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Key-value attributes to store with the files. + chunking_strategy: + $ref: '#/components/schemas/VectorStoreChunkingStrategy' + description: >- + (Optional) The chunking strategy used to chunk the file(s). Defaults to + auto. 
+ additionalProperties: false + required: + - file_ids + title: OpenaiCreateVectorStoreFileBatchRequest + VectorStoreFileBatchObject: + type: object + properties: + id: + type: string + description: Unique identifier for the file batch + object: + type: string + default: vector_store.file_batch + description: >- + Object type identifier, always "vector_store.file_batch" + created_at: + type: integer + description: >- + Timestamp when the file batch was created + vector_store_id: + type: string + description: >- + ID of the vector store containing the file batch + status: + $ref: '#/components/schemas/VectorStoreFileStatus' + description: >- + Current processing status of the file batch + file_counts: + $ref: '#/components/schemas/VectorStoreFileCounts' + description: >- + File processing status counts for the batch + additionalProperties: false + required: + - id + - object + - created_at + - vector_store_id + - status + - file_counts + title: VectorStoreFileBatchObject + description: OpenAI Vector Store File Batch object. + VectorStoreFileStatus: + oneOf: + - type: string + const: completed + - type: string + const: in_progress + - type: string + const: cancelled + - type: string + const: failed + VectorStoreFileLastError: + type: object + properties: + code: + oneOf: + - type: string + const: server_error + - type: string + const: rate_limit_exceeded + description: >- + Error code indicating the type of failure + message: + type: string + description: >- + Human-readable error message describing the failure + additionalProperties: false + required: + - code + - message + title: VectorStoreFileLastError + description: >- + Error information for failed vector store file processing. 
+ VectorStoreFileObject: + type: object + properties: + id: + type: string + description: Unique identifier for the file + object: + type: string + default: vector_store.file + description: >- + Object type identifier, always "vector_store.file" + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Key-value attributes associated with the file + chunking_strategy: + oneOf: + - $ref: '#/components/schemas/VectorStoreChunkingStrategyAuto' + - $ref: '#/components/schemas/VectorStoreChunkingStrategyStatic' + discriminator: + propertyName: type + mapping: + auto: '#/components/schemas/VectorStoreChunkingStrategyAuto' + static: '#/components/schemas/VectorStoreChunkingStrategyStatic' + description: >- + Strategy used for splitting the file into chunks + created_at: + type: integer + description: >- + Timestamp when the file was added to the vector store + last_error: + $ref: '#/components/schemas/VectorStoreFileLastError' + description: >- + (Optional) Error information if file processing failed + status: + $ref: '#/components/schemas/VectorStoreFileStatus' + description: Current processing status of the file + usage_bytes: + type: integer + default: 0 + description: Storage space used by this file in bytes + vector_store_id: + type: string + description: >- + ID of the vector store containing this file + additionalProperties: false + required: + - id + - object + - attributes + - chunking_strategy + - created_at + - status + - usage_bytes + - vector_store_id + title: VectorStoreFileObject + description: OpenAI Vector Store File object. 
+ VectorStoreFilesListInBatchResponse: + type: object + properties: + object: + type: string + default: list + description: Object type identifier, always "list" + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreFileObject' + description: >- + List of vector store file objects in the batch + first_id: + type: string + description: >- + (Optional) ID of the first file in the list for pagination + last_id: + type: string + description: >- + (Optional) ID of the last file in the list for pagination + has_more: + type: boolean + default: false + description: >- + Whether there are more files available beyond this page + additionalProperties: false + required: + - object + - data + - has_more + title: VectorStoreFilesListInBatchResponse + description: >- + Response from listing files in a vector store file batch. + VectorStoreListFilesResponse: + type: object + properties: + object: + type: string + default: list + description: Object type identifier, always "list" + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreFileObject' + description: List of vector store file objects + first_id: + type: string + description: >- + (Optional) ID of the first file in the list for pagination + last_id: + type: string + description: >- + (Optional) ID of the last file in the list for pagination + has_more: + type: boolean + default: false + description: >- + Whether there are more files available beyond this page + additionalProperties: false + required: + - object + - data + - has_more + title: VectorStoreListFilesResponse + description: >- + Response from listing files in a vector store. + OpenaiAttachFileToVectorStoreRequest: + type: object + properties: + file_id: + type: string + description: >- + The ID of the file to attach to the vector store. 
+ attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The key-value attributes stored with the file, which can be used for filtering. + chunking_strategy: + $ref: '#/components/schemas/VectorStoreChunkingStrategy' + description: >- + The chunking strategy to use for the file. + additionalProperties: false + required: + - file_id + title: OpenaiAttachFileToVectorStoreRequest + OpenaiUpdateVectorStoreFileRequest: + type: object + properties: + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The updated key-value attributes to store with the file. + additionalProperties: false + required: + - attributes + title: OpenaiUpdateVectorStoreFileRequest + VectorStoreFileDeleteResponse: + type: object + properties: + id: + type: string + description: Unique identifier of the deleted file + object: + type: string + default: vector_store.file.deleted + description: >- + Object type identifier for the deletion response + deleted: + type: boolean + default: true + description: >- + Whether the deletion operation was successful + additionalProperties: false + required: + - id + - object + - deleted + title: VectorStoreFileDeleteResponse + description: >- + Response from deleting a vector store file. + VectorStoreContent: + type: object + properties: + type: + type: string + const: text + description: >- + Content type, currently only "text" is supported + text: + type: string + description: The actual text content + additionalProperties: false + required: + - type + - text + title: VectorStoreContent + description: >- + Content item from a vector store file or search result. 
+ VectorStoreFileContentsResponse: + type: object + properties: + file_id: + type: string + description: Unique identifier for the file + filename: + type: string + description: Name of the file + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Key-value attributes associated with the file + content: + type: array + items: + $ref: '#/components/schemas/VectorStoreContent' + description: List of content items from the file + additionalProperties: false + required: + - file_id + - filename + - attributes + - content + title: VectorStoreFileContentsResponse + description: >- + Response from retrieving the contents of a vector store file. + OpenaiSearchVectorStoreRequest: + type: object + properties: + query: + oneOf: + - type: string + - type: array + items: + type: string + description: >- + The query string or array for performing the search. + filters: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + Filters based on file attributes to narrow the search results. + max_num_results: + type: integer + description: >- + Maximum number of results to return (1 to 50 inclusive, default 10). + ranking_options: + type: object + properties: + ranker: + type: string + description: >- + (Optional) Name of the ranking algorithm to use + score_threshold: + type: number + default: 0.0 + description: >- + (Optional) Minimum relevance score threshold for results + additionalProperties: false + description: >- + Ranking options for fine-tuning the search results. 
+ rewrite_query: + type: boolean + description: >- + Whether to rewrite the natural language query for vector search (default + false) + search_mode: + type: string + description: >- + The search mode to use - "keyword", "vector", or "hybrid" (default "vector") + additionalProperties: false + required: + - query + title: OpenaiSearchVectorStoreRequest + VectorStoreSearchResponse: + type: object + properties: + file_id: + type: string + description: >- + Unique identifier of the file containing the result + filename: + type: string + description: Name of the file containing the result + score: + type: number + description: Relevance score for this search result + attributes: + type: object + additionalProperties: + oneOf: + - type: string + - type: number + - type: boolean + description: >- + (Optional) Key-value attributes associated with the file + content: + type: array + items: + $ref: '#/components/schemas/VectorStoreContent' + description: >- + List of content items matching the search query + additionalProperties: false + required: + - file_id + - filename + - score + - content + title: VectorStoreSearchResponse + description: Response from searching a vector store. 
+ VectorStoreSearchResponsePage: + type: object + properties: + object: + type: string + default: vector_store.search_results.page + description: >- + Object type identifier for the search results page + search_query: + type: string + description: >- + The original search query that was executed + data: + type: array + items: + $ref: '#/components/schemas/VectorStoreSearchResponse' + description: List of search result objects + has_more: + type: boolean + default: false + description: >- + Whether there are more results available beyond this page + next_page: + type: string + description: >- + (Optional) Token for retrieving the next page of results + additionalProperties: false + required: + - object + - search_query + - data + - has_more + title: VectorStoreSearchResponsePage + description: >- + Paginated response from searching a vector store. + VersionInfo: + type: object + properties: + version: + type: string + description: Version number of the service + additionalProperties: false + required: + - version + title: VersionInfo + description: Version information for the service. + AppendRowsRequest: + type: object + properties: + rows: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The rows to append to the dataset. 
+ additionalProperties: false + required: + - rows + title: AppendRowsRequest + PaginatedResponse: + type: object + properties: + data: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The list of items for the current page + has_more: + type: boolean + description: >- + Whether there are more items available after this set + url: + type: string + description: The URL for accessing this list + additionalProperties: false + required: + - data + - has_more + title: PaginatedResponse + description: >- + A generic paginated response that follows a simple format. + Dataset: + type: object + properties: + identifier: + type: string + provider_resource_id: + type: string + provider_id: + type: string + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: dataset + default: dataset + description: >- + Type of resource, always 'dataset' for datasets + purpose: + type: string + enum: + - post-training/messages + - eval/question-answer + - eval/messages-answer + description: >- + Purpose of the dataset indicating its intended use + source: + oneOf: + - $ref: '#/components/schemas/URIDataSource' + - $ref: '#/components/schemas/RowsDataSource' + discriminator: + propertyName: type + mapping: + uri: '#/components/schemas/URIDataSource' + rows: '#/components/schemas/RowsDataSource' + description: >- + Data source configuration for the dataset + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: Additional metadata for the dataset + additionalProperties: false + required: + - identifier + - provider_id + - type + - purpose + - source + - metadata + title: Dataset + description: >- + Dataset resource for storing and 
accessing training or evaluation data. + RowsDataSource: + type: object + properties: + type: + type: string + const: rows + default: rows + rows: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The dataset is stored in rows. E.g. - [ {"messages": [{"role": "user", + "content": "Hello, world!"}, {"role": "assistant", "content": "Hello, + world!"}]} ] + additionalProperties: false + required: + - type + - rows + title: RowsDataSource + description: A dataset stored in rows. + URIDataSource: + type: object + properties: + type: + type: string + const: uri + default: uri + uri: + type: string + description: >- + The dataset can be obtained from a URI. E.g. - "https://mywebsite.com/mydata.jsonl" + - "lsfs://mydata.jsonl" - "data:csv;base64,{base64_content}" + additionalProperties: false + required: + - type + - uri + title: URIDataSource + description: >- + A dataset that can be obtained from a URI. + ListDatasetsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/Dataset' + description: List of datasets + additionalProperties: false + required: + - data + title: ListDatasetsResponse + description: Response from listing datasets. + DataSource: + oneOf: + - $ref: '#/components/schemas/URIDataSource' + - $ref: '#/components/schemas/RowsDataSource' + discriminator: + propertyName: type + mapping: + uri: '#/components/schemas/URIDataSource' + rows: '#/components/schemas/RowsDataSource' + RegisterDatasetRequest: + type: object + properties: + purpose: + type: string + enum: + - post-training/messages + - eval/question-answer + - eval/messages-answer + description: >- + The purpose of the dataset. One of: - "post-training/messages": The dataset + contains a messages column with list of messages for post-training. 
{ + "messages": [ {"role": "user", "content": "Hello, world!"}, {"role": "assistant", + "content": "Hello, world!"}, ] } - "eval/question-answer": The dataset + contains a question column and an answer column for evaluation. { "question": + "What is the capital of France?", "answer": "Paris" } - "eval/messages-answer": + The dataset contains a messages column with list of messages and an answer + column for evaluation. { "messages": [ {"role": "user", "content": "Hello, + my name is John Doe."}, {"role": "assistant", "content": "Hello, John + Doe. How can I help you today?"}, {"role": "user", "content": "What's + my name?"}, ], "answer": "John Doe" } + source: + $ref: '#/components/schemas/DataSource' + description: >- + The data source of the dataset. Ensure that the data source schema is + compatible with the purpose of the dataset. Examples: - { "type": "uri", + "uri": "https://mywebsite.com/mydata.jsonl" } - { "type": "uri", "uri": + "lsfs://mydata.jsonl" } - { "type": "uri", "uri": "data:csv;base64,{base64_content}" + } - { "type": "uri", "uri": "huggingface://llamastack/simpleqa?split=train" + } - { "type": "rows", "rows": [ { "messages": [ {"role": "user", "content": + "Hello, world!"}, {"role": "assistant", "content": "Hello, world!"}, ] + } ] } + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The metadata for the dataset. - E.g. {"description": "My dataset"}. + dataset_id: + type: string + description: >- + The ID of the dataset. If not provided, an ID will be generated. 
+ additionalProperties: false + required: + - purpose + - source + title: RegisterDatasetRequest + AgentConfig: + type: object + properties: + sampling_params: + $ref: '#/components/schemas/SamplingParams' + input_shields: + type: array + items: + type: string + output_shields: + type: array + items: + type: string + toolgroups: + type: array + items: + $ref: '#/components/schemas/AgentTool' + client_tools: + type: array + items: + $ref: '#/components/schemas/ToolDef' + tool_choice: + type: string + enum: + - auto + - required + - none + title: ToolChoice + description: >- + Whether tool use is required or automatic. This is a hint to the model + which may not be followed. It depends on the Instruction Following capabilities + of the model. + deprecated: true + tool_prompt_format: + type: string + enum: + - json + - function_tag + - python_list + title: ToolPromptFormat + description: >- + Prompt format for calling custom / zero shot tools. + deprecated: true + tool_config: + $ref: '#/components/schemas/ToolConfig' + max_infer_iters: + type: integer + default: 10 + model: + type: string + description: >- + The model identifier to use for the agent + instructions: + type: string + description: The system instructions for the agent + name: + type: string + description: >- + Optional name for the agent, used in telemetry and identification + enable_session_persistence: + type: boolean + default: false + description: >- + Optional flag indicating whether session data has to be persisted + response_format: + $ref: '#/components/schemas/ResponseFormat' + description: Optional response format configuration + additionalProperties: false + required: + - model + - instructions + title: AgentConfig + description: Configuration for an agent. 
+ AgentTool: + oneOf: + - type: string + - type: object + properties: + name: + type: string + args: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + additionalProperties: false + required: + - name + - args + title: AgentToolGroupWithArgs + GrammarResponseFormat: + type: object + properties: + type: + type: string + enum: + - json_schema + - grammar + description: >- + Must be "grammar" to identify this format type + const: grammar + default: grammar + bnf: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The BNF grammar specification the response should conform to + additionalProperties: false + required: + - type + - bnf + title: GrammarResponseFormat + description: >- + Configuration for grammar-guided response generation. + GreedySamplingStrategy: + type: object + properties: + type: + type: string + const: greedy + default: greedy + description: >- + Must be "greedy" to identify this sampling strategy + additionalProperties: false + required: + - type + title: GreedySamplingStrategy + description: >- + Greedy sampling strategy that selects the highest probability token at each + step. + JsonSchemaResponseFormat: + type: object + properties: + type: + type: string + enum: + - json_schema + - grammar + description: >- + Must be "json_schema" to identify this format type + const: json_schema + default: json_schema + json_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + The JSON schema the response should conform to. In a Python SDK, this + is often a `pydantic` model. 
+ additionalProperties: false + required: + - type + - json_schema + title: JsonSchemaResponseFormat + description: >- + Configuration for JSON schema-guided response generation. + ResponseFormat: + oneOf: + - $ref: '#/components/schemas/JsonSchemaResponseFormat' + - $ref: '#/components/schemas/GrammarResponseFormat' + discriminator: + propertyName: type + mapping: + json_schema: '#/components/schemas/JsonSchemaResponseFormat' + grammar: '#/components/schemas/GrammarResponseFormat' + SamplingParams: + type: object + properties: + strategy: + oneOf: + - $ref: '#/components/schemas/GreedySamplingStrategy' + - $ref: '#/components/schemas/TopPSamplingStrategy' + - $ref: '#/components/schemas/TopKSamplingStrategy' + discriminator: + propertyName: type + mapping: + greedy: '#/components/schemas/GreedySamplingStrategy' + top_p: '#/components/schemas/TopPSamplingStrategy' + top_k: '#/components/schemas/TopKSamplingStrategy' + description: The sampling strategy. + max_tokens: + type: integer + default: 0 + description: >- + The maximum number of tokens that can be generated in the completion. + The token count of your prompt plus max_tokens cannot exceed the model's + context length. + repetition_penalty: + type: number + default: 1.0 + description: >- + Number between -2.0 and 2.0. Positive values penalize new tokens based + on whether they appear in the text so far, increasing the model's likelihood + to talk about new topics. + stop: + type: array + items: + type: string + description: >- + Up to 4 sequences where the API will stop generating further tokens. The + returned text will not contain the stop sequence. + additionalProperties: false + required: + - strategy + title: SamplingParams + description: Sampling parameters. + ToolConfig: + type: object + properties: + tool_choice: + oneOf: + - type: string + enum: + - auto + - required + - none + title: ToolChoice + description: >- + Whether tool use is required or automatic. 
This is a hint to the model + which may not be followed. It depends on the Instruction Following + capabilities of the model. + - type: string + default: auto + description: >- + (Optional) Whether tool use is automatic, required, or none. Can also + specify a tool name to use a specific tool. Defaults to ToolChoice.auto. + tool_prompt_format: + type: string + enum: + - json + - function_tag + - python_list + description: >- + (Optional) Instructs the model how to format tool calls. By default, Llama + Stack will attempt to use a format that is best adapted to the model. + - `ToolPromptFormat.json`: The tool calls are formatted as a JSON object. + - `ToolPromptFormat.function_tag`: The tool calls are enclosed in a + tag. - `ToolPromptFormat.python_list`: The tool calls are output as Python + syntax -- a list of function calls. + system_message_behavior: + type: string + enum: + - append + - replace + description: >- + (Optional) Config for how to override the default system prompt. - `SystemMessageBehavior.append`: + Appends the provided system message to the default system prompt. - `SystemMessageBehavior.replace`: + Replaces the default system prompt with the provided system message. The + system message can include the string '{{function_definitions}}' to indicate + where the function definitions should be inserted. + default: append + additionalProperties: false + title: ToolConfig + description: Configuration for tool use. + TopKSamplingStrategy: + type: object + properties: + type: + type: string + const: top_k + default: top_k + description: >- + Must be "top_k" to identify this sampling strategy + top_k: + type: integer + description: >- + Number of top tokens to consider for sampling. Must be at least 1 + additionalProperties: false + required: + - type + - top_k + title: TopKSamplingStrategy + description: >- + Top-k sampling strategy that restricts sampling to the k most likely tokens. 
+ TopPSamplingStrategy: + type: object + properties: + type: + type: string + const: top_p + default: top_p + description: >- + Must be "top_p" to identify this sampling strategy + temperature: + type: number + description: >- + Controls randomness in sampling. Higher values increase randomness + top_p: + type: number + default: 0.95 + description: >- + Cumulative probability threshold for nucleus sampling. Defaults to 0.95 + additionalProperties: false + required: + - type + title: TopPSamplingStrategy + description: >- + Top-p (nucleus) sampling strategy that samples from the smallest set of tokens + with cumulative probability >= p. + CreateAgentRequest: + type: object + properties: + agent_config: + $ref: '#/components/schemas/AgentConfig' + description: The configuration for the agent. + additionalProperties: false + required: + - agent_config + title: CreateAgentRequest + AgentCreateResponse: + type: object + properties: + agent_id: + type: string + description: Unique identifier for the created agent + additionalProperties: false + required: + - agent_id + title: AgentCreateResponse + description: >- + Response returned when creating a new agent. + Agent: + type: object + properties: + agent_id: + type: string + description: Unique identifier for the agent + agent_config: + $ref: '#/components/schemas/AgentConfig' + description: Configuration settings for the agent + created_at: + type: string + format: date-time + description: Timestamp when the agent was created + additionalProperties: false + required: + - agent_id + - agent_config + - created_at + title: Agent + description: >- + An agent instance with configuration and metadata. + CreateAgentSessionRequest: + type: object + properties: + session_name: + type: string + description: The name of the session to create. 
+ additionalProperties: false + required: + - session_name + title: CreateAgentSessionRequest + AgentSessionCreateResponse: + type: object + properties: + session_id: + type: string + description: >- + Unique identifier for the created session + additionalProperties: false + required: + - session_id + title: AgentSessionCreateResponse + description: >- + Response returned when creating a new agent session. + InferenceStep: + type: object + properties: + turn_id: + type: string + description: The ID of the turn. + step_id: + type: string + description: The ID of the step. + started_at: + type: string + format: date-time + description: The time the step started. + completed_at: + type: string + format: date-time + description: The time the step completed. + step_type: + type: string + enum: + - inference + - tool_execution + - shield_call + - memory_retrieval + title: StepType + description: Type of the step in an agent turn. + const: inference + default: inference + model_response: + $ref: '#/components/schemas/CompletionMessage' + description: The response from the LLM. + additionalProperties: false + required: + - turn_id + - step_id + - step_type + - model_response + title: InferenceStep + description: An inference step in an agent turn. + MemoryRetrievalStep: + type: object + properties: + turn_id: + type: string + description: The ID of the turn. + step_id: + type: string + description: The ID of the step. + started_at: + type: string + format: date-time + description: The time the step started. + completed_at: + type: string + format: date-time + description: The time the step completed. + step_type: + type: string + enum: + - inference + - tool_execution + - shield_call + - memory_retrieval + title: StepType + description: Type of the step in an agent turn. + const: memory_retrieval + default: memory_retrieval + vector_db_ids: + type: string + description: >- + The IDs of the vector databases to retrieve context from. 
+ inserted_context: + $ref: '#/components/schemas/InterleavedContent' + description: >- + The context retrieved from the vector databases. + additionalProperties: false + required: + - turn_id + - step_id + - step_type + - vector_db_ids + - inserted_context + title: MemoryRetrievalStep + description: >- + A memory retrieval step in an agent turn. + Session: + type: object + properties: + session_id: + type: string + description: >- + Unique identifier for the conversation session + session_name: + type: string + description: Human-readable name for the session + turns: + type: array + items: + $ref: '#/components/schemas/Turn' + description: >- + List of all turns that have occurred in this session + started_at: + type: string + format: date-time + description: Timestamp when the session was created + additionalProperties: false + required: + - session_id + - session_name + - turns + - started_at + title: Session + description: >- + A single session of an interaction with an Agentic System. + ShieldCallStep: + type: object + properties: + turn_id: + type: string + description: The ID of the turn. + step_id: + type: string + description: The ID of the step. + started_at: + type: string + format: date-time + description: The time the step started. + completed_at: + type: string + format: date-time + description: The time the step completed. + step_type: + type: string + enum: + - inference + - tool_execution + - shield_call + - memory_retrieval + title: StepType + description: Type of the step in an agent turn. + const: shield_call + default: shield_call + violation: + $ref: '#/components/schemas/SafetyViolation' + description: The violation from the shield call. + additionalProperties: false + required: + - turn_id + - step_id + - step_type + title: ShieldCallStep + description: A shield call step in an agent turn. + ToolExecutionStep: + type: object + properties: + turn_id: + type: string + description: The ID of the turn. 
+ step_id: + type: string + description: The ID of the step. + started_at: + type: string + format: date-time + description: The time the step started. + completed_at: + type: string + format: date-time + description: The time the step completed. + step_type: + type: string + enum: + - inference + - tool_execution + - shield_call + - memory_retrieval + title: StepType + description: Type of the step in an agent turn. + const: tool_execution + default: tool_execution + tool_calls: + type: array + items: + $ref: '#/components/schemas/ToolCall' + description: The tool calls to execute. + tool_responses: + type: array + items: + $ref: '#/components/schemas/ToolResponse' + description: The tool responses from the tool calls. + additionalProperties: false + required: + - turn_id + - step_id + - step_type + - tool_calls + - tool_responses + title: ToolExecutionStep + description: A tool execution step in an agent turn. + ToolResponse: + type: object + properties: + call_id: + type: string + description: >- + Unique identifier for the tool call this response is for + tool_name: + oneOf: + - type: string + enum: + - brave_search + - wolfram_alpha + - photogen + - code_interpreter + title: BuiltinTool + - type: string + description: Name of the tool that was invoked + content: + $ref: '#/components/schemas/InterleavedContent' + description: The response content from the tool + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional metadata about the tool response + additionalProperties: false + required: + - call_id + - tool_name + - content + title: ToolResponse + description: Response from a tool invocation. 
+ Turn: + type: object + properties: + turn_id: + type: string + description: >- + Unique identifier for the turn within a session + session_id: + type: string + description: >- + Unique identifier for the conversation session + input_messages: + type: array + items: + oneOf: + - $ref: '#/components/schemas/UserMessage' + - $ref: '#/components/schemas/ToolResponseMessage' + description: >- + List of messages that initiated this turn + steps: + type: array + items: + oneOf: + - $ref: '#/components/schemas/InferenceStep' + - $ref: '#/components/schemas/ToolExecutionStep' + - $ref: '#/components/schemas/ShieldCallStep' + - $ref: '#/components/schemas/MemoryRetrievalStep' + discriminator: + propertyName: step_type + mapping: + inference: '#/components/schemas/InferenceStep' + tool_execution: '#/components/schemas/ToolExecutionStep' + shield_call: '#/components/schemas/ShieldCallStep' + memory_retrieval: '#/components/schemas/MemoryRetrievalStep' + description: >- + Ordered list of processing steps executed during this turn + output_message: + $ref: '#/components/schemas/CompletionMessage' + description: >- + The model's generated response containing content and metadata + output_attachments: + type: array + items: + type: object + properties: + content: + oneOf: + - type: string + - $ref: '#/components/schemas/InterleavedContentItem' + - type: array + items: + $ref: '#/components/schemas/InterleavedContentItem' + - $ref: '#/components/schemas/URL' + description: The content of the attachment. + mime_type: + type: string + description: The MIME type of the attachment. + additionalProperties: false + required: + - content + - mime_type + title: Attachment + description: An attachment to an agent turn. 
+ description: >- + (Optional) Files or media attached to the agent's response + started_at: + type: string + format: date-time + description: Timestamp when the turn began + completed_at: + type: string + format: date-time + description: >- + (Optional) Timestamp when the turn finished, if completed + additionalProperties: false + required: + - turn_id + - session_id + - input_messages + - steps + - output_message + - started_at + title: Turn + description: >- + A single turn in an interaction with an Agentic System. + CreateAgentTurnRequest: + type: object + properties: + messages: + type: array + items: + oneOf: + - $ref: '#/components/schemas/UserMessage' + - $ref: '#/components/schemas/ToolResponseMessage' + description: List of messages to start the turn with. + stream: + type: boolean + description: >- + (Optional) If True, generate an SSE event stream of the response. Defaults + to False. + documents: + type: array + items: + type: object + properties: + content: + oneOf: + - type: string + - $ref: '#/components/schemas/InterleavedContentItem' + - type: array + items: + $ref: '#/components/schemas/InterleavedContentItem' + - $ref: '#/components/schemas/URL' + description: The content of the document. + mime_type: + type: string + description: The MIME type of the document. + additionalProperties: false + required: + - content + - mime_type + title: Document + description: A document to be used by an agent. + description: >- + (Optional) List of documents to create the turn with. + toolgroups: + type: array + items: + $ref: '#/components/schemas/AgentTool' + description: >- + (Optional) List of toolgroups to create the turn with, will be used in + addition to the agent's config toolgroups for the request. + tool_config: + $ref: '#/components/schemas/ToolConfig' + description: >- + (Optional) The tool configuration to create the turn with, will be used + to override the agent's tool_config. 
+ additionalProperties: false + required: + - messages + title: CreateAgentTurnRequest + AgentTurnResponseEvent: + type: object + properties: + payload: + oneOf: + - $ref: '#/components/schemas/AgentTurnResponseStepStartPayload' + - $ref: '#/components/schemas/AgentTurnResponseStepProgressPayload' + - $ref: '#/components/schemas/AgentTurnResponseStepCompletePayload' + - $ref: '#/components/schemas/AgentTurnResponseTurnStartPayload' + - $ref: '#/components/schemas/AgentTurnResponseTurnCompletePayload' + - $ref: '#/components/schemas/AgentTurnResponseTurnAwaitingInputPayload' + discriminator: + propertyName: event_type + mapping: + step_start: '#/components/schemas/AgentTurnResponseStepStartPayload' + step_progress: '#/components/schemas/AgentTurnResponseStepProgressPayload' + step_complete: '#/components/schemas/AgentTurnResponseStepCompletePayload' + turn_start: '#/components/schemas/AgentTurnResponseTurnStartPayload' + turn_complete: '#/components/schemas/AgentTurnResponseTurnCompletePayload' + turn_awaiting_input: '#/components/schemas/AgentTurnResponseTurnAwaitingInputPayload' + description: >- + Event-specific payload containing event data + additionalProperties: false + required: + - payload + title: AgentTurnResponseEvent + description: >- + An event in an agent turn response stream. 
+ AgentTurnResponseStepCompletePayload: + type: object + properties: + event_type: + type: string + enum: + - step_start + - step_complete + - step_progress + - turn_start + - turn_complete + - turn_awaiting_input + const: step_complete + default: step_complete + description: Type of event being reported + step_type: + type: string + enum: + - inference + - tool_execution + - shield_call + - memory_retrieval + description: Type of step being executed + step_id: + type: string + description: >- + Unique identifier for the step within a turn + step_details: + oneOf: + - $ref: '#/components/schemas/InferenceStep' + - $ref: '#/components/schemas/ToolExecutionStep' + - $ref: '#/components/schemas/ShieldCallStep' + - $ref: '#/components/schemas/MemoryRetrievalStep' + discriminator: + propertyName: step_type + mapping: + inference: '#/components/schemas/InferenceStep' + tool_execution: '#/components/schemas/ToolExecutionStep' + shield_call: '#/components/schemas/ShieldCallStep' + memory_retrieval: '#/components/schemas/MemoryRetrievalStep' + description: Complete details of the executed step + additionalProperties: false + required: + - event_type + - step_type + - step_id + - step_details + title: AgentTurnResponseStepCompletePayload + description: >- + Payload for step completion events in agent turn responses. 
+ AgentTurnResponseStepProgressPayload: + type: object + properties: + event_type: + type: string + enum: + - step_start + - step_complete + - step_progress + - turn_start + - turn_complete + - turn_awaiting_input + const: step_progress + default: step_progress + description: Type of event being reported + step_type: + type: string + enum: + - inference + - tool_execution + - shield_call + - memory_retrieval + description: Type of step being executed + step_id: + type: string + description: >- + Unique identifier for the step within a turn + delta: + oneOf: + - $ref: '#/components/schemas/TextDelta' + - $ref: '#/components/schemas/ImageDelta' + - $ref: '#/components/schemas/ToolCallDelta' + discriminator: + propertyName: type + mapping: + text: '#/components/schemas/TextDelta' + image: '#/components/schemas/ImageDelta' + tool_call: '#/components/schemas/ToolCallDelta' + description: >- + Incremental content changes during step execution + additionalProperties: false + required: + - event_type + - step_type + - step_id + - delta + title: AgentTurnResponseStepProgressPayload + description: >- + Payload for step progress events in agent turn responses. 
+ AgentTurnResponseStepStartPayload: + type: object + properties: + event_type: + type: string + enum: + - step_start + - step_complete + - step_progress + - turn_start + - turn_complete + - turn_awaiting_input + const: step_start + default: step_start + description: Type of event being reported + step_type: + type: string + enum: + - inference + - tool_execution + - shield_call + - memory_retrieval + description: Type of step being executed + step_id: + type: string + description: >- + Unique identifier for the step within a turn + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Additional metadata for the step + additionalProperties: false + required: + - event_type + - step_type + - step_id + title: AgentTurnResponseStepStartPayload + description: >- + Payload for step start events in agent turn responses. + AgentTurnResponseStreamChunk: + type: object + properties: + event: + $ref: '#/components/schemas/AgentTurnResponseEvent' + description: >- + Individual event in the agent turn response stream + additionalProperties: false + required: + - event + title: AgentTurnResponseStreamChunk + description: Streamed agent turn completion response. + "AgentTurnResponseTurnAwaitingInputPayload": + type: object + properties: + event_type: + type: string + enum: + - step_start + - step_complete + - step_progress + - turn_start + - turn_complete + - turn_awaiting_input + const: turn_awaiting_input + default: turn_awaiting_input + description: Type of event being reported + turn: + $ref: '#/components/schemas/Turn' + description: >- + Turn data when waiting for external tool responses + additionalProperties: false + required: + - event_type + - turn + title: >- + AgentTurnResponseTurnAwaitingInputPayload + description: >- + Payload for turn awaiting input events in agent turn responses. 
+ AgentTurnResponseTurnCompletePayload: + type: object + properties: + event_type: + type: string + enum: + - step_start + - step_complete + - step_progress + - turn_start + - turn_complete + - turn_awaiting_input + const: turn_complete + default: turn_complete + description: Type of event being reported + turn: + $ref: '#/components/schemas/Turn' + description: >- + Complete turn data including all steps and results + additionalProperties: false + required: + - event_type + - turn + title: AgentTurnResponseTurnCompletePayload + description: >- + Payload for turn completion events in agent turn responses. + AgentTurnResponseTurnStartPayload: + type: object + properties: + event_type: + type: string + enum: + - step_start + - step_complete + - step_progress + - turn_start + - turn_complete + - turn_awaiting_input + const: turn_start + default: turn_start + description: Type of event being reported + turn_id: + type: string + description: >- + Unique identifier for the turn within a session + additionalProperties: false + required: + - event_type + - turn_id + title: AgentTurnResponseTurnStartPayload + description: >- + Payload for turn start events in agent turn responses. + ImageDelta: + type: object + properties: + type: + type: string + const: image + default: image + description: >- + Discriminator type of the delta. Always "image" + image: + type: string + contentEncoding: base64 + description: The incremental image data as bytes + additionalProperties: false + required: + - type + - image + title: ImageDelta + description: >- + An image content delta for streaming responses. + TextDelta: + type: object + properties: + type: + type: string + const: text + default: text + description: >- + Discriminator type of the delta. Always "text" + text: + type: string + description: The incremental text content + additionalProperties: false + required: + - type + - text + title: TextDelta + description: >- + A text content delta for streaming responses. 
+ ToolCallDelta: + type: object + properties: + type: + type: string + const: tool_call + default: tool_call + description: >- + Discriminator type of the delta. Always "tool_call" + tool_call: + oneOf: + - type: string + - $ref: '#/components/schemas/ToolCall' + description: >- + Either an in-progress tool call string or the final parsed tool call + parse_status: + type: string + enum: + - started + - in_progress + - failed + - succeeded + description: Current parsing status of the tool call + additionalProperties: false + required: + - type + - tool_call + - parse_status + title: ToolCallDelta + description: >- + A tool call content delta for streaming responses. + ResumeAgentTurnRequest: + type: object + properties: + tool_responses: + type: array + items: + $ref: '#/components/schemas/ToolResponse' + description: >- + The tool call responses to resume the turn with. + stream: + type: boolean + description: Whether to stream the response. + additionalProperties: false + required: + - tool_responses + title: ResumeAgentTurnRequest + AgentStepResponse: + type: object + properties: + step: + oneOf: + - $ref: '#/components/schemas/InferenceStep' + - $ref: '#/components/schemas/ToolExecutionStep' + - $ref: '#/components/schemas/ShieldCallStep' + - $ref: '#/components/schemas/MemoryRetrievalStep' + discriminator: + propertyName: step_type + mapping: + inference: '#/components/schemas/InferenceStep' + tool_execution: '#/components/schemas/ToolExecutionStep' + shield_call: '#/components/schemas/ShieldCallStep' + memory_retrieval: '#/components/schemas/MemoryRetrievalStep' + description: >- + The complete step data and execution details + additionalProperties: false + required: + - step + title: AgentStepResponse + description: >- + Response containing details of a specific agent step. 
+ Benchmark: + type: object + properties: + identifier: + type: string + provider_resource_id: + type: string + provider_id: + type: string + type: + type: string + enum: + - model + - shield + - vector_db + - dataset + - scoring_function + - benchmark + - tool + - tool_group + - prompt + const: benchmark + default: benchmark + description: The resource type, always benchmark + dataset_id: + type: string + description: >- + Identifier of the dataset to use for the benchmark evaluation + scoring_functions: + type: array + items: + type: string + description: >- + List of scoring function identifiers to apply during evaluation + metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: Metadata for this evaluation task + additionalProperties: false + required: + - identifier + - provider_id + - type + - dataset_id + - scoring_functions + - metadata + title: Benchmark + description: >- + A benchmark resource for evaluating model performance. + ListBenchmarksResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/Benchmark' + additionalProperties: false + required: + - data + title: ListBenchmarksResponse + RegisterBenchmarkRequest: + type: object + properties: + benchmark_id: + type: string + description: The ID of the benchmark to register. + dataset_id: + type: string + description: >- + The ID of the dataset to use for the benchmark. + scoring_functions: + type: array + items: + type: string + description: >- + The scoring functions to use for the benchmark. + provider_benchmark_id: + type: string + description: >- + The ID of the provider benchmark to use for the benchmark. + provider_id: + type: string + description: >- + The ID of the provider to use for the benchmark. 
+ metadata: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The metadata to use for the benchmark. + additionalProperties: false + required: + - benchmark_id + - dataset_id + - scoring_functions + title: RegisterBenchmarkRequest + AgentCandidate: + type: object + properties: + type: + type: string + const: agent + default: agent + config: + $ref: '#/components/schemas/AgentConfig' + description: >- + The configuration for the agent candidate. + additionalProperties: false + required: + - type + - config + title: AgentCandidate + description: An agent candidate for evaluation. + BenchmarkConfig: + type: object + properties: + eval_candidate: + oneOf: + - $ref: '#/components/schemas/ModelCandidate' + - $ref: '#/components/schemas/AgentCandidate' + discriminator: + propertyName: type + mapping: + model: '#/components/schemas/ModelCandidate' + agent: '#/components/schemas/AgentCandidate' + description: The candidate to evaluate. + scoring_params: + type: object + additionalProperties: + $ref: '#/components/schemas/ScoringFnParams' + description: >- + Map between scoring function id and parameters for each scoring function + you want to run + num_examples: + type: integer + description: >- + (Optional) The number of examples to evaluate. If not provided, all examples + in the dataset will be evaluated + additionalProperties: false + required: + - eval_candidate + - scoring_params + title: BenchmarkConfig + description: >- + A benchmark configuration for evaluation. + ModelCandidate: + type: object + properties: + type: + type: string + const: model + default: model + model: + type: string + description: The model ID to evaluate. + sampling_params: + $ref: '#/components/schemas/SamplingParams' + description: The sampling parameters for the model. 
+ system_message: + $ref: '#/components/schemas/SystemMessage' + description: >- + (Optional) The system message providing instructions or context to the + model. + additionalProperties: false + required: + - type + - model + - sampling_params + title: ModelCandidate + description: A model candidate for evaluation. + EvaluateRowsRequest: + type: object + properties: + input_rows: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The rows to evaluate. + scoring_functions: + type: array + items: + type: string + description: >- + The scoring functions to use for the evaluation. + benchmark_config: + $ref: '#/components/schemas/BenchmarkConfig' + description: The configuration for the benchmark. + additionalProperties: false + required: + - input_rows + - scoring_functions + - benchmark_config + title: EvaluateRowsRequest + EvaluateResponse: + type: object + properties: + generations: + type: array + items: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The generations from the evaluation. + scores: + type: object + additionalProperties: + $ref: '#/components/schemas/ScoringResult' + description: The scores from the evaluation. + additionalProperties: false + required: + - generations + - scores + title: EvaluateResponse + description: The response from an evaluation. + RunEvalRequest: + type: object + properties: + benchmark_config: + $ref: '#/components/schemas/BenchmarkConfig' + description: The configuration for the benchmark. 
+ additionalProperties: false + required: + - benchmark_config + title: RunEvalRequest + Job: + type: object + properties: + job_id: + type: string + description: Unique identifier for the job + status: + type: string + enum: + - completed + - in_progress + - failed + - scheduled + - cancelled + description: Current execution status of the job + additionalProperties: false + required: + - job_id + - status + title: Job + description: >- + A job execution instance with status tracking. + RerankRequest: + type: object + properties: + model: + type: string + description: >- + The identifier of the reranking model to use. + query: + oneOf: + - type: string + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartImageParam' + description: >- + The search query to rank items against. Can be a string, text content + part, or image content part. The input must not exceed the model's max + input token length. + items: + type: array + items: + oneOf: + - type: string + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartTextParam' + - $ref: '#/components/schemas/OpenAIChatCompletionContentPartImageParam' + description: >- + List of items to rerank. Each item can be a string, text content part, + or image content part. Each input must not exceed the model's max input + token length. + max_num_results: + type: integer + description: >- + (Optional) Maximum number of results to return. Default: returns all. + additionalProperties: false + required: + - model + - query + - items + title: RerankRequest + RerankData: + type: object + properties: + index: + type: integer + description: >- + The original index of the document in the input list + relevance_score: + type: number + description: >- + The relevance score from the model output. Values are inverted when applicable + so that higher scores indicate greater relevance. 
+ additionalProperties: false + required: + - index + - relevance_score + title: RerankData + description: >- + A single rerank result from a reranking response. + RerankResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/RerankData' + description: >- + List of rerank result objects, sorted by relevance score (descending) + additionalProperties: false + required: + - data + title: RerankResponse + description: Response from a reranking request. + Checkpoint: + type: object + properties: + identifier: + type: string + description: Unique identifier for the checkpoint + created_at: + type: string + format: date-time + description: >- + Timestamp when the checkpoint was created + epoch: + type: integer + description: >- + Training epoch when the checkpoint was saved + post_training_job_id: + type: string + description: >- + Identifier of the training job that created this checkpoint + path: + type: string + description: >- + File system path where the checkpoint is stored + training_metrics: + $ref: '#/components/schemas/PostTrainingMetric' + description: >- + (Optional) Training metrics associated with this checkpoint + additionalProperties: false + required: + - identifier + - created_at + - epoch + - post_training_job_id + - path + title: Checkpoint + description: Checkpoint created during training runs. + PostTrainingJobArtifactsResponse: + type: object + properties: + job_uuid: + type: string + description: Unique identifier for the training job + checkpoints: + type: array + items: + $ref: '#/components/schemas/Checkpoint' + description: >- + List of model checkpoints created during training + additionalProperties: false + required: + - job_uuid + - checkpoints + title: PostTrainingJobArtifactsResponse + description: Artifacts of a finetuning job. 
+ PostTrainingMetric: + type: object + properties: + epoch: + type: integer + description: Training epoch number + train_loss: + type: number + description: Loss value on the training dataset + validation_loss: + type: number + description: Loss value on the validation dataset + perplexity: + type: number + description: >- + Perplexity metric indicating model confidence + additionalProperties: false + required: + - epoch + - train_loss + - validation_loss + - perplexity + title: PostTrainingMetric + description: >- + Training metrics captured during post-training jobs. + CancelTrainingJobRequest: + type: object + properties: + job_uuid: + type: string + description: The UUID of the job to cancel. + additionalProperties: false + required: + - job_uuid + title: CancelTrainingJobRequest + PostTrainingJobStatusResponse: + type: object + properties: + job_uuid: + type: string + description: Unique identifier for the training job + status: + type: string + enum: + - completed + - in_progress + - failed + - scheduled + - cancelled + description: Current status of the training job + scheduled_at: + type: string + format: date-time + description: >- + (Optional) Timestamp when the job was scheduled + started_at: + type: string + format: date-time + description: >- + (Optional) Timestamp when the job execution began + completed_at: + type: string + format: date-time + description: >- + (Optional) Timestamp when the job finished, if completed + resources_allocated: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Information about computational resources allocated to the + job + checkpoints: + type: array + items: + $ref: '#/components/schemas/Checkpoint' + description: >- + List of model checkpoints created during training + additionalProperties: false + required: + - job_uuid + - status + - checkpoints + title: PostTrainingJobStatusResponse + 
description: Status of a finetuning job. + ListPostTrainingJobsResponse: + type: object + properties: + data: + type: array + items: + type: object + properties: + job_uuid: + type: string + additionalProperties: false + required: + - job_uuid + title: PostTrainingJob + additionalProperties: false + required: + - data + title: ListPostTrainingJobsResponse + DPOAlignmentConfig: + type: object + properties: + beta: + type: number + description: Temperature parameter for the DPO loss + loss_type: + $ref: '#/components/schemas/DPOLossType' + default: sigmoid + description: The type of loss function to use for DPO + additionalProperties: false + required: + - beta + - loss_type + title: DPOAlignmentConfig + description: >- + Configuration for Direct Preference Optimization (DPO) alignment. + DPOLossType: + type: string + enum: + - sigmoid + - hinge + - ipo + - kto_pair + title: DPOLossType + DataConfig: + type: object + properties: + dataset_id: + type: string + description: >- + Unique identifier for the training dataset + batch_size: + type: integer + description: Number of samples per training batch + shuffle: + type: boolean + description: >- + Whether to shuffle the dataset during training + data_format: + $ref: '#/components/schemas/DatasetFormat' + description: >- + Format of the dataset (instruct or dialog) + validation_dataset_id: + type: string + description: >- + (Optional) Unique identifier for the validation dataset + packed: + type: boolean + default: false + description: >- + (Optional) Whether to pack multiple samples into a single sequence for + efficiency + train_on_input: + type: boolean + default: false + description: >- + (Optional) Whether to compute loss on input tokens as well as output tokens + additionalProperties: false + required: + - dataset_id + - batch_size + - shuffle + - data_format + title: DataConfig + description: >- + Configuration for training data and data loading. 
+ DatasetFormat: + type: string + enum: + - instruct + - dialog + title: DatasetFormat + description: Format of the training dataset. + EfficiencyConfig: + type: object + properties: + enable_activation_checkpointing: + type: boolean + default: false + description: >- + (Optional) Whether to use activation checkpointing to reduce memory usage + enable_activation_offloading: + type: boolean + default: false + description: >- + (Optional) Whether to offload activations to CPU to save GPU memory + memory_efficient_fsdp_wrap: + type: boolean + default: false + description: >- + (Optional) Whether to use memory-efficient FSDP wrapping + fsdp_cpu_offload: + type: boolean + default: false + description: >- + (Optional) Whether to offload FSDP parameters to CPU + additionalProperties: false + title: EfficiencyConfig + description: >- + Configuration for memory and compute efficiency optimizations. + OptimizerConfig: + type: object + properties: + optimizer_type: + $ref: '#/components/schemas/OptimizerType' + description: >- + Type of optimizer to use (adam, adamw, or sgd) + lr: + type: number + description: Learning rate for the optimizer + weight_decay: + type: number + description: >- + Weight decay coefficient for regularization + num_warmup_steps: + type: integer + description: Number of steps for learning rate warmup + additionalProperties: false + required: + - optimizer_type + - lr + - weight_decay + - num_warmup_steps + title: OptimizerConfig + description: >- + Configuration parameters for the optimization algorithm. + OptimizerType: + type: string + enum: + - adam + - adamw + - sgd + title: OptimizerType + description: >- + Available optimizer algorithms for training. 
+ TrainingConfig: + type: object + properties: + n_epochs: + type: integer + description: Number of training epochs to run + max_steps_per_epoch: + type: integer + default: 1 + description: Maximum number of steps to run per epoch + gradient_accumulation_steps: + type: integer + default: 1 + description: >- + Number of steps to accumulate gradients before updating + max_validation_steps: + type: integer + default: 1 + description: >- + (Optional) Maximum number of validation steps per epoch + data_config: + $ref: '#/components/schemas/DataConfig' + description: >- + (Optional) Configuration for data loading and formatting + optimizer_config: + $ref: '#/components/schemas/OptimizerConfig' + description: >- + (Optional) Configuration for the optimization algorithm + efficiency_config: + $ref: '#/components/schemas/EfficiencyConfig' + description: >- + (Optional) Configuration for memory and compute optimizations + dtype: + type: string + default: bf16 + description: >- + (Optional) Data type for model parameters (bf16, fp16, fp32) + additionalProperties: false + required: + - n_epochs + - max_steps_per_epoch + - gradient_accumulation_steps + title: TrainingConfig + description: >- + Comprehensive configuration for the training process. + PreferenceOptimizeRequest: + type: object + properties: + job_uuid: + type: string + description: The UUID of the job to create. + finetuned_model: + type: string + description: The model to fine-tune. + algorithm_config: + $ref: '#/components/schemas/DPOAlignmentConfig' + description: The algorithm configuration. + training_config: + $ref: '#/components/schemas/TrainingConfig' + description: The training configuration. + hyperparam_search_config: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The hyperparam search configuration. 
+ logger_config: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The logger configuration. + additionalProperties: false + required: + - job_uuid + - finetuned_model + - algorithm_config + - training_config + - hyperparam_search_config + - logger_config + title: PreferenceOptimizeRequest + PostTrainingJob: + type: object + properties: + job_uuid: + type: string + additionalProperties: false + required: + - job_uuid + title: PostTrainingJob + AlgorithmConfig: + oneOf: + - $ref: '#/components/schemas/LoraFinetuningConfig' + - $ref: '#/components/schemas/QATFinetuningConfig' + discriminator: + propertyName: type + mapping: + LoRA: '#/components/schemas/LoraFinetuningConfig' + QAT: '#/components/schemas/QATFinetuningConfig' + LoraFinetuningConfig: + type: object + properties: + type: + type: string + const: LoRA + default: LoRA + description: Algorithm type identifier, always "LoRA" + lora_attn_modules: + type: array + items: + type: string + description: >- + List of attention module names to apply LoRA to + apply_lora_to_mlp: + type: boolean + description: Whether to apply LoRA to MLP layers + apply_lora_to_output: + type: boolean + description: >- + Whether to apply LoRA to output projection layers + rank: + type: integer + description: >- + Rank of the LoRA adaptation (lower rank = fewer parameters) + alpha: + type: integer + description: >- + LoRA scaling parameter that controls adaptation strength + use_dora: + type: boolean + default: false + description: >- + (Optional) Whether to use DoRA (Weight-Decomposed Low-Rank Adaptation) + quantize_base: + type: boolean + default: false + description: >- + (Optional) Whether to quantize the base model weights + additionalProperties: false + required: + - type + - lora_attn_modules + - apply_lora_to_mlp + - apply_lora_to_output + - rank + - alpha + title: LoraFinetuningConfig + description: >- + 
Configuration for Low-Rank Adaptation (LoRA) fine-tuning. + QATFinetuningConfig: + type: object + properties: + type: + type: string + const: QAT + default: QAT + description: Algorithm type identifier, always "QAT" + quantizer_name: + type: string + description: >- + Name of the quantization algorithm to use + group_size: + type: integer + description: Size of groups for grouped quantization + additionalProperties: false + required: + - type + - quantizer_name + - group_size + title: QATFinetuningConfig + description: >- + Configuration for Quantization-Aware Training (QAT) fine-tuning. + SupervisedFineTuneRequest: + type: object + properties: + job_uuid: + type: string + description: The UUID of the job to create. + training_config: + $ref: '#/components/schemas/TrainingConfig' + description: The training configuration. + hyperparam_search_config: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The hyperparam search configuration. + logger_config: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The logger configuration. + model: + type: string + description: The model to fine-tune. + checkpoint_dir: + type: string + description: The directory to save checkpoint(s) to. + algorithm_config: + $ref: '#/components/schemas/AlgorithmConfig' + description: The algorithm configuration. + additionalProperties: false + required: + - job_uuid + - training_config + - hyperparam_search_config + - logger_config + title: SupervisedFineTuneRequest + QueryMetricsRequest: + type: object + properties: + start_time: + type: integer + description: The start time of the metric to query. + end_time: + type: integer + description: The end time of the metric to query. + granularity: + type: string + description: The granularity of the metric to query. 
+ query_type: + type: string + enum: + - range + - instant + description: The type of query to perform. + label_matchers: + type: array + items: + type: object + properties: + name: + type: string + description: The name of the label to match + value: + type: string + description: The value to match against + operator: + type: string + enum: + - '=' + - '!=' + - =~ + - '!~' + description: >- + The comparison operator to use for matching + default: '=' + additionalProperties: false + required: + - name + - value + - operator + title: MetricLabelMatcher + description: >- + A matcher for filtering metrics by label values. + description: >- + The label matchers to apply to the metric. + additionalProperties: false + required: + - start_time + - query_type + title: QueryMetricsRequest + MetricDataPoint: + type: object + properties: + timestamp: + type: integer + description: >- + Unix timestamp when the metric value was recorded + value: + type: number + description: >- + The numeric value of the metric at this timestamp + unit: + type: string + additionalProperties: false + required: + - timestamp + - value + - unit + title: MetricDataPoint + description: >- + A single data point in a metric time series. + MetricLabel: + type: object + properties: + name: + type: string + description: The name of the label + value: + type: string + description: The value of the label + additionalProperties: false + required: + - name + - value + title: MetricLabel + description: A label associated with a metric. 
+ MetricSeries: + type: object + properties: + metric: + type: string + description: The name of the metric + labels: + type: array + items: + $ref: '#/components/schemas/MetricLabel' + description: >- + List of labels associated with this metric series + values: + type: array + items: + $ref: '#/components/schemas/MetricDataPoint' + description: >- + List of data points in chronological order + additionalProperties: false + required: + - metric + - labels + - values + title: MetricSeries + description: A time series of metric data points. + QueryMetricsResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/MetricSeries' + description: >- + List of metric series matching the query criteria + additionalProperties: false + required: + - data + title: QueryMetricsResponse + description: >- + Response containing metric time series data. + QueryCondition: + type: object + properties: + key: + type: string + description: The attribute key to filter on + op: + $ref: '#/components/schemas/QueryConditionOp' + description: The comparison operator to apply + value: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: The value to compare against + additionalProperties: false + required: + - key + - op + - value + title: QueryCondition + description: A condition for filtering query results. + QueryConditionOp: + type: string + enum: + - eq + - ne + - gt + - lt + title: QueryConditionOp + description: >- + Comparison operators for query conditions. + QuerySpansRequest: + type: object + properties: + attribute_filters: + type: array + items: + $ref: '#/components/schemas/QueryCondition' + description: >- + The attribute filters to apply to the spans. + attributes_to_return: + type: array + items: + type: string + description: The attributes to return in the spans. + max_depth: + type: integer + description: The maximum depth of the tree. 
+ additionalProperties: false + required: + - attribute_filters + - attributes_to_return + title: QuerySpansRequest + Span: + type: object + properties: + span_id: + type: string + description: Unique identifier for the span + trace_id: + type: string + description: >- + Unique identifier for the trace this span belongs to + parent_span_id: + type: string + description: >- + (Optional) Unique identifier for the parent span, if this is a child span + name: + type: string + description: >- + Human-readable name describing the operation this span represents + start_time: + type: string + format: date-time + description: Timestamp when the operation began + end_time: + type: string + format: date-time + description: >- + (Optional) Timestamp when the operation finished, if completed + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Key-value pairs containing additional metadata about the span + additionalProperties: false + required: + - span_id + - trace_id + - name + - start_time + title: Span + description: >- + A span representing a single operation within a trace. + QuerySpansResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/Span' + description: >- + List of spans matching the query criteria + additionalProperties: false + required: + - data + title: QuerySpansResponse + description: Response containing a list of spans. + SaveSpansToDatasetRequest: + type: object + properties: + attribute_filters: + type: array + items: + $ref: '#/components/schemas/QueryCondition' + description: >- + The attribute filters to apply to the spans. + attributes_to_save: + type: array + items: + type: string + description: The attributes to save to the dataset. + dataset_id: + type: string + description: >- + The ID of the dataset to save the spans to. 
+ max_depth: + type: integer + description: The maximum depth of the tree. + additionalProperties: false + required: + - attribute_filters + - attributes_to_save + - dataset_id + title: SaveSpansToDatasetRequest + GetSpanTreeRequest: + type: object + properties: + attributes_to_return: + type: array + items: + type: string + description: The attributes to return in the tree. + max_depth: + type: integer + description: The maximum depth of the tree. + additionalProperties: false + title: GetSpanTreeRequest + SpanWithStatus: + type: object + properties: + span_id: + type: string + description: Unique identifier for the span + trace_id: + type: string + description: >- + Unique identifier for the trace this span belongs to + parent_span_id: + type: string + description: >- + (Optional) Unique identifier for the parent span, if this is a child span + name: + type: string + description: >- + Human-readable name describing the operation this span represents + start_time: + type: string + format: date-time + description: Timestamp when the operation began + end_time: + type: string + format: date-time + description: >- + (Optional) Timestamp when the operation finished, if completed + attributes: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) Key-value pairs containing additional metadata about the span + status: + $ref: '#/components/schemas/SpanStatus' + description: >- + (Optional) The current status of the span + additionalProperties: false + required: + - span_id + - trace_id + - name + - start_time + title: SpanWithStatus + description: A span that includes status information. 
+ QuerySpanTreeResponse: + type: object + properties: + data: + type: object + additionalProperties: + $ref: '#/components/schemas/SpanWithStatus' + description: >- + Dictionary mapping span IDs to spans with status information + additionalProperties: false + required: + - data + title: QuerySpanTreeResponse + description: >- + Response containing a tree structure of spans. + QueryTracesRequest: + type: object + properties: + attribute_filters: + type: array + items: + $ref: '#/components/schemas/QueryCondition' + description: >- + The attribute filters to apply to the traces. + limit: + type: integer + description: The limit of traces to return. + offset: + type: integer + description: The offset of the traces to return. + order_by: + type: array + items: + type: string + description: The order by of the traces to return. + additionalProperties: false + title: QueryTracesRequest + Trace: + type: object + properties: + trace_id: + type: string + description: Unique identifier for the trace + root_span_id: + type: string + description: >- + Unique identifier for the root span that started this trace + start_time: + type: string + format: date-time + description: Timestamp when the trace began + end_time: + type: string + format: date-time + description: >- + (Optional) Timestamp when the trace finished, if completed + additionalProperties: false + required: + - trace_id + - root_span_id + - start_time + title: Trace + description: >- + A trace representing the complete execution path of a request across multiple + operations. + QueryTracesResponse: + type: object + properties: + data: + type: array + items: + $ref: '#/components/schemas/Trace' + description: >- + List of traces matching the query criteria + additionalProperties: false + required: + - data + title: QueryTracesResponse + description: Response containing a list of traces. 
+ responses: + BadRequest400: + description: The request was invalid or malformed + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + status: 400 + title: Bad Request + detail: The request was invalid or malformed + TooManyRequests429: + description: >- + The client has sent too many requests in a given amount of time + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + status: 429 + title: Too Many Requests + detail: >- + You have exceeded the rate limit. Please try again later. + InternalServerError500: + description: >- + The server encountered an unexpected error + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + status: 500 + title: Internal Server Error + detail: >- + An unexpected error occurred. Our team has been notified. + DefaultError: + description: An unexpected error occurred + content: + application/json: + schema: + $ref: '#/components/schemas/Error' + example: + status: 0 + title: Error + detail: An unexpected error occurred +security: + - Default: [] +tags: + - name: Agents + description: >- + APIs for creating and interacting with agentic systems. + x-displayName: Agents + - name: Benchmarks + description: '' + - name: DatasetIO + description: '' + - name: Datasets + description: '' + - name: Eval + description: '' + x-displayName: >- + Llama Stack Evaluation API for running evaluations on model and agent candidates. + - name: Files + description: '' + - name: Inference + description: >- + This API provides the raw interface to the underlying models. Two kinds of models + are supported: + + - LLM models: these models generate "raw" and "chat" (conversational) completions. + + - Embedding models: these models generate embeddings to be used for semantic + search. + x-displayName: >- + Llama Stack Inference API for generating completions, chat completions, and + embeddings. 
+ - name: Inspect + description: '' + - name: Models + description: '' + - name: PostTraining (Coming Soon) + description: '' + - name: Prompts + description: '' + x-displayName: >- + Protocol for prompt management operations. + - name: Providers + description: '' + x-displayName: >- + Providers API for inspecting, listing, and modifying providers and their configurations. + - name: Safety + description: '' + - name: Scoring + description: '' + - name: ScoringFunctions + description: '' + - name: Shields + description: '' + - name: SyntheticDataGeneration (Coming Soon) + description: '' + - name: Telemetry + description: '' + - name: ToolGroups + description: '' + - name: ToolRuntime + description: '' + - name: VectorDBs + description: '' + - name: VectorIO + description: '' +x-tagGroups: + - name: Operations + tags: + - Agents + - Benchmarks + - DatasetIO + - Datasets + - Eval + - Files + - Inference + - Inspect + - Models + - PostTraining (Coming Soon) + - Prompts + - Providers + - Safety + - Scoring + - ScoringFunctions + - Shields + - SyntheticDataGeneration (Coming Soon) + - Telemetry + - ToolGroups + - ToolRuntime + - VectorDBs + - VectorIO From 36543a1100b8310b63f7bf8d767e5fc5823a801b Mon Sep 17 00:00:00 2001 From: Matthew Farrellee Date: Thu, 2 Oct 2025 14:30:13 -0400 Subject: [PATCH 07/13] chore: fix agents tests for non-ollama providers, provide max_tokens (#3657) # What does this PR do? closes #3656 ## Test Plan openai is not enabled in ci, so manual testing with: ``` $ ./scripts/integration-tests.sh --stack-config ci-tests --suite base --setup gpt --subdirs agents --inference-mode live === Llama Stack Integration Test Runner === Stack Config: ci-tests Setup: gpt Inference Mode: live Test Suite: base Test Subdirs: agents Test Pattern: Checking llama packages llama-stack 0.2.23 .../llama-stack llama-stack-client 0.3.0a3 ollama 0.5.1 === System Resources Before Tests === ... 
=== Applying Setup Environment Variables === Setting up environment variables: === Running Integration Tests === Test subdirs to run: agents Added test files from agents: 3 files === Running all collected tests in a single pytest command === Total test files: 3 + pytest -s -v tests/integration/agents/test_persistence.py tests/integration/agents/test_openai_responses.py tests/integration/agents/test_agents.py --stack-config=ci-tests --inference-mode=live -k 'not( builtin_tool or safety_with_image or code_interpreter or test_rag )' --setup=gpt --color=yes --capture=tee-sys WARNING 2025-10-02 13:14:32,653 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,043 root:258 uncategorized: Unknown logging category: tests. Falling back to default 'root' level: 20 INFO 2025-10-02 13:14:33,063 tests.integration.conftest:86 tests: Applying setup 'gpt' ========================================= test session starts ========================================== platform linux -- Python 3.12.11, pytest-8.4.2, pluggy-1.6.0 -- .../.venv/bin/python cachedir: .pytest_cache metadata: {'Python': '3.12.11', 'Platform': 'Linux-6.16.7-200.fc42.x86_64-x86_64-with-glibc2.41', 'Packages': {'pytest': '8.4.2', 'pluggy': '1.6.0'}, 'Plugins': {'html': '4.1.1', 'anyio': '4.9.0', 'timeout': '2.4.0', 'cov': '6.2.1', 'asyncio': '1.1.0', 'nbval': '0.11.0', 'socket': '0.7.0', 'json-report': '1.5.0', 'metadata': '3.1.1'}} rootdir: ... configfile: pyproject.toml plugins: html-4.1.1, anyio-4.9.0, timeout-2.4.0, cov-6.2.1, asyncio-1.1.0, nbval-0.11.0, socket-0.7.0, json-report-1.5.0, metadata-3.1.1 asyncio: mode=Mode.AUTO, asyncio_default_fixture_loop_scope=None, asyncio_default_test_loop_scope=function collected 32 items / 6 deselected / 26 selected tests/integration/agents/test_persistence.py::test_delete_agents_and_sessions SKIPPED (This ...) 
[ 3%] tests/integration/agents/test_persistence.py::test_get_agent_turns_and_steps SKIPPED (This t...) [ 7%] tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=openai/gpt-4o-tools0-True] instantiating llama_stack_client WARNING 2025-10-02 13:14:33,472 root:258 uncategorized: Unknown logging category: testing. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,477 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,960 root:258 uncategorized: Unknown logging category: tokenizer_utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,962 root:258 uncategorized: Unknown logging category: models::llama. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,963 root:258 uncategorized: Unknown logging category: models::llama. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,968 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,974 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,978 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,350 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,366 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,489 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,490 root:258 uncategorized: Unknown logging category: inference_store. 
Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,697 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,918 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 INFO 2025-10-02 13:14:35,945 llama_stack.providers.utils.inference.inference_store:74 inference_store: Write queue disabled for SQLite to avoid concurrency issues WARNING 2025-10-02 13:14:36,172 root:258 uncategorized: Unknown logging category: files. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,218 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,219 root:258 uncategorized: Unknown logging category: vector_io. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,231 root:258 uncategorized: Unknown logging category: vector_io. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,255 root:258 uncategorized: Unknown logging category: tool_runtime. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,486 root:258 uncategorized: Unknown logging category: responses_store. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,503 root:258 uncategorized: Unknown logging category: openai::responses. Falling back to default 'root' level: 20 INFO 2025-10-02 13:14:36,524 llama_stack.providers.utils.responses.responses_store:80 responses_store: Write queue disabled for SQLite to avoid concurrency issues WARNING 2025-10-02 13:14:36,528 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,703 root:258 uncategorized: Unknown logging category: uncategorized. 
Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,726 llama_stack.core.routing_tables.models:36 core::routing_tables: Model refresh failed for provider fireworks: Pass Fireworks API Key in the header X-LlamaStack-Provider-Data as { "fireworks_api_key": } WARNING 2025-10-02 13:14:36,727 llama_stack.core.routing_tables.models:36 core::routing_tables: Model refresh failed for provider together: Pass Together API Key in the header X-LlamaStack-Provider-Data as { "together_api_key": } WARNING 2025-10-02 13:14:38,404 llama_stack.core.routing_tables.models:36 core::routing_tables: Model refresh failed for provider anthropic: API key is not set. Please provide a valid API key in the provider data header, e.g. x-llamastack-provider-data: {"anthropic_api_key": ""}, or in the provider config. WARNING 2025-10-02 13:14:38,406 llama_stack.core.routing_tables.models:36 core::routing_tables: Model refresh failed for provider gemini: API key is not set. Please provide a valid API key in the provider data header, e.g. x-llamastack-provider-data: {"gemini_api_key": ""}, or in the provider config. WARNING 2025-10-02 13:14:38,408 llama_stack.core.routing_tables.models:36 core::routing_tables: Model refresh failed for provider groq: API key is not set. Please provide a valid API key in the provider data header, e.g. x-llamastack-provider-data: {"groq_api_key": ""}, or in the provider config. WARNING 2025-10-02 13:14:38,411 llama_stack.core.routing_tables.models:36 core::routing_tables: Model refresh failed for provider sambanova: API key is not set. Please provide a valid API key in the provider data header, e.g. x-llamastack-provider-data: {"sambanova_api_key": ""}, or in the provider config. 
llama_stack_client instantiated in 5.237s SKIPPED [ 11%] tests/integration/agents/test_openai_responses.py::test_list_response_input_items[openai_client-txt=openai/gpt-4o] SKIPPED [ 15%] tests/integration/agents/test_openai_responses.py::test_list_response_input_items_with_limit_and_order[txt=openai/gpt-4o] SKIPPED [ 19%] tests/integration/agents/test_openai_responses.py::test_function_call_output_response[txt=openai/gpt-4o] SKIPPED [ 23%] tests/integration/agents/test_openai_responses.py::test_function_call_output_response_with_none_arguments[txt=openai/gpt-4o] SKIPPED [ 26%] tests/integration/agents/test_agents.py::test_agent_simple[openai/gpt-4o] PASSED [ 30%] tests/integration/agents/test_agents.py::test_agent_name[txt=openai/gpt-4o] SKIPPED (this te...) [ 34%] tests/integration/agents/test_agents.py::test_tool_config[openai/gpt-4o] PASSED [ 38%] tests/integration/agents/test_agents.py::test_custom_tool[openai/gpt-4o] FAILED [ 42%] tests/integration/agents/test_agents.py::test_custom_tool_infinite_loop[openai/gpt-4o] PASSED [ 46%] tests/integration/agents/test_agents.py::test_tool_choice_required[openai/gpt-4o] INFO 2025-10-02 13:14:51,559 llama_stack.providers.inline.agents.meta_reference.agent_instance:691 agents::meta_reference: done with MAX iterations (2), exiting. 
PASSED [ 50%] tests/integration/agents/test_agents.py::test_tool_choice_none[openai/gpt-4o] PASSED [ 53%] tests/integration/agents/test_agents.py::test_tool_choice_get_boiling_point[openai/gpt-4o] XFAIL [ 57%] tests/integration/agents/test_agents.py::test_create_turn_response[openai/gpt-4o-client_tools0] PASSED [ 61%] tests/integration/agents/test_agents.py::test_multi_tool_calls[openai/gpt-4o] PASSED [ 65%] tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=openai/gpt-4o-tools0-False] SKIPPED [ 69%] tests/integration/agents/test_openai_responses.py::test_list_response_input_items[client_with_models-txt=openai/gpt-4o] PASSED [ 73%] tests/integration/agents/test_agents.py::test_create_turn_response[openai/gpt-4o-client_tools1] PASSED [ 76%] tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=openai/gpt-4o-tools1-True] SKIPPED [ 80%] tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=openai/gpt-4o-tools1-False] SKIPPED [ 84%] tests/integration/agents/test_openai_responses.py::test_responses_store[client_with_models-txt=openai/gpt-4o-tools0-True] SKIPPED [ 88%] tests/integration/agents/test_openai_responses.py::test_responses_store[client_with_models-txt=openai/gpt-4o-tools0-False] SKIPPED [ 92%] tests/integration/agents/test_openai_responses.py::test_responses_store[client_with_models-txt=openai/gpt-4o-tools1-True] SKIPPED [ 96%] tests/integration/agents/test_openai_responses.py::test_responses_store[client_with_models-txt=openai/gpt-4o-tools1-False] SKIPPED [100%] =============================================== FAILURES =============================================== ___________________________________ test_custom_tool[openai/gpt-4o] ____________________________________ tests/integration/agents/test_agents.py:370: in test_custom_tool assert "-100" in logs_str E assert '-100' in "inference> Polyjuice Potion is a fictional substance from the Harry Potter 
series, and it doesn't have a scientifically defined boiling point. If you have any other real liquid in mind, feel free to ask!" ========================================= slowest 10 durations ========================================= 5.47s setup tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=openai/gpt-4o-tools0-True] 4.78s call tests/integration/agents/test_agents.py::test_custom_tool[openai/gpt-4o] 3.01s call tests/integration/agents/test_agents.py::test_tool_choice_required[openai/gpt-4o] 2.97s call tests/integration/agents/test_agents.py::test_agent_simple[openai/gpt-4o] 2.85s call tests/integration/agents/test_agents.py::test_tool_choice_none[openai/gpt-4o] 2.06s call tests/integration/agents/test_agents.py::test_multi_tool_calls[openai/gpt-4o] 1.83s call tests/integration/agents/test_agents.py::test_create_turn_response[openai/gpt-4o-client_tools0] 1.83s call tests/integration/agents/test_agents.py::test_custom_tool_infinite_loop[openai/gpt-4o] 1.29s call tests/integration/agents/test_agents.py::test_create_turn_response[openai/gpt-4o-client_tools1] 0.57s call tests/integration/agents/test_openai_responses.py::test_list_response_input_items[client_with_models-txt=openai/gpt-4o] ======================================= short test summary info ======================================== FAILED tests/integration/agents/test_agents.py::test_custom_tool[openai/gpt-4o] - assert '-100' in "inference> Polyjuice Potion is a fictional substance from the Harry Potter series... 
=========== 1 failed, 9 passed, 15 skipped, 6 deselected, 1 xfailed, 139 warnings in 27.18s ============ ``` note: the failure is separate from the issue being fixed --- tests/integration/agents/test_agents.py | 3 + .../{51398b60b155.json => 044dcd8fdeb1.json} | 86 +- .../recordings/responses/13ab2c1c38ed.json | 420 + .../{b367f68a8355.json => 18ada6a5dcf6.json} | 12 +- .../{ec4853ce509b.json => 1dd3641034a3.json} | 12 +- .../recordings/responses/41b2727ebdec.json | 16449 ++++++++++++++++ .../{dd6cc3f2e6ce.json => 67bec1334dc9.json} | 12 +- .../{7d28e973eff5.json => 67f94c4f8ba0.json} | 230 +- .../{f55d47f584e9.json => 8b531e81126a.json} | 12 +- .../recordings/responses/aeb1abed5560.json | 4137 ++++ .../recordings/responses/bebc02ac1fb5.json | 415 + .../recordings/responses/c7ff69e043ea.json | 389 + .../recordings/responses/d3fc756ea885.json | 415 + .../{afaacb433b7c.json => e11745e75e87.json} | 12 +- .../recordings/responses/e3bded498c54.json | 4137 ++++ .../{8e5912c90491.json => e4cee6b71b0e.json} | 12 +- .../recordings/responses/e871b8007b8c.json | 389 + .../recordings/responses/f389f5cdf583.json | 4137 ++++ .../recordings/responses/fc0662299704.json | 415 + 19 files changed, 31500 insertions(+), 194 deletions(-) rename tests/integration/recordings/responses/{51398b60b155.json => 044dcd8fdeb1.json} (91%) create mode 100644 tests/integration/recordings/responses/13ab2c1c38ed.json rename tests/integration/recordings/responses/{b367f68a8355.json => 18ada6a5dcf6.json} (94%) rename tests/integration/recordings/responses/{ec4853ce509b.json => 1dd3641034a3.json} (94%) create mode 100644 tests/integration/recordings/responses/41b2727ebdec.json rename tests/integration/recordings/responses/{dd6cc3f2e6ce.json => 67bec1334dc9.json} (94%) rename tests/integration/recordings/responses/{7d28e973eff5.json => 67f94c4f8ba0.json} (91%) rename tests/integration/recordings/responses/{f55d47f584e9.json => 8b531e81126a.json} (94%) create mode 100644 
tests/integration/recordings/responses/aeb1abed5560.json create mode 100644 tests/integration/recordings/responses/bebc02ac1fb5.json create mode 100644 tests/integration/recordings/responses/c7ff69e043ea.json create mode 100644 tests/integration/recordings/responses/d3fc756ea885.json rename tests/integration/recordings/responses/{afaacb433b7c.json => e11745e75e87.json} (94%) create mode 100644 tests/integration/recordings/responses/e3bded498c54.json rename tests/integration/recordings/responses/{8e5912c90491.json => e4cee6b71b0e.json} (94%) create mode 100644 tests/integration/recordings/responses/e871b8007b8c.json create mode 100644 tests/integration/recordings/responses/f389f5cdf583.json create mode 100644 tests/integration/recordings/responses/fc0662299704.json diff --git a/tests/integration/agents/test_agents.py b/tests/integration/agents/test_agents.py index 23529f91e..07ba7bb01 100644 --- a/tests/integration/agents/test_agents.py +++ b/tests/integration/agents/test_agents.py @@ -68,6 +68,7 @@ def agent_config(llama_stack_client, text_model_id): "temperature": 0.0001, "top_p": 0.9, }, + "max_tokens": 512, }, tools=[], input_shields=available_shields, @@ -88,6 +89,7 @@ def agent_config_without_safety(text_model_id): "temperature": 0.0001, "top_p": 0.9, }, + "max_tokens": 512, }, tools=[], enable_session_persistence=False, @@ -198,6 +200,7 @@ def test_tool_config(agent_config): "temperature": 1.0, "top_p": 0.9, }, + "max_tokens": 512, }, toolgroups=[], enable_session_persistence=False, diff --git a/tests/integration/recordings/responses/51398b60b155.json b/tests/integration/recordings/responses/044dcd8fdeb1.json similarity index 91% rename from tests/integration/recordings/responses/51398b60b155.json rename to tests/integration/recordings/responses/044dcd8fdeb1.json index b73e8a44b..7e8b92202 100644 --- a/tests/integration/recordings/responses/51398b60b155.json +++ b/tests/integration/recordings/responses/044dcd8fdeb1.json @@ -15,7 +15,7 @@ "content": "Give me a 
sentence that contains the word: hello" } ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "top_p": 0.9 @@ -28,7 +28,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -43,7 +43,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -54,7 +54,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -69,7 +69,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -80,7 +80,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -95,7 +95,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -106,7 +106,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -121,7 +121,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -132,7 +132,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -147,7 +147,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -158,11 
+158,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { - "content": " me", + "content": " us", "function_call": null, "refusal": null, "role": "assistant", @@ -173,7 +173,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -184,7 +184,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -199,7 +199,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -210,7 +210,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -225,7 +225,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -236,7 +236,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -251,7 +251,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -262,7 +262,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -277,7 +277,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -288,7 +288,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -303,7 +303,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -314,7 +314,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -329,7 +329,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -340,7 +340,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -355,7 +355,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -366,11 +366,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { - "content": " I", + "content": " we", "function_call": null, "refusal": null, "role": "assistant", @@ -381,7 +381,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -392,7 +392,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -407,7 +407,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -418,7 +418,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -433,7 +433,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -444,7 +444,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -459,7 +459,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -470,7 +470,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -485,7 +485,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -496,7 +496,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -511,7 +511,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -522,7 +522,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-231", + "id": "chatcmpl-122", "choices": [ { "delta": { @@ -537,7 +537,7 @@ "logprobs": null } ], - "created": 1759368372, + "created": 1759427013, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/13ab2c1c38ed.json b/tests/integration/recordings/responses/13ab2c1c38ed.json new file mode 100644 index 
000000000..0b8819160 --- /dev/null +++ b/tests/integration/recordings/responses/13ab2c1c38ed.json @@ -0,0 +1,420 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_h50zu2cg", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\": true, \"liquid_name\": \"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_h50zu2cg", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius", + "default": true + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427022, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427022, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": 
null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", 
+ "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-27", + "choices": [ + { + "delta": { + 
"content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759427023, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/b367f68a8355.json b/tests/integration/recordings/responses/18ada6a5dcf6.json similarity index 94% rename from tests/integration/recordings/responses/b367f68a8355.json rename to tests/integration/recordings/responses/18ada6a5dcf6.json index 73d05fade..997c5afcc 100644 --- a/tests/integration/recordings/responses/b367f68a8355.json +++ b/tests/integration/recordings/responses/18ada6a5dcf6.json @@ -15,7 +15,7 @@ "content": "Get the boiling point of polyjuice with a tool call." } ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "tool_choice": "auto", @@ -55,7 +55,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-787", + "id": "chatcmpl-521", "choices": [ { "delta": { @@ -66,7 +66,7 @@ "tool_calls": [ { "index": 0, - "id": "call_q055g6sq", + "id": "call_nhfpubt2", "function": { "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}", "name": "get_boiling_point" @@ -80,7 +80,7 @@ "logprobs": null } ], - "created": 1759368376, + "created": 1759427016, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -91,7 +91,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-787", + "id": "chatcmpl-521", "choices": [ { "delta": { @@ -106,7 +106,7 @@ "logprobs": null } ], - "created": 1759368376, + "created": 1759427016, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git 
a/tests/integration/recordings/responses/ec4853ce509b.json b/tests/integration/recordings/responses/1dd3641034a3.json similarity index 94% rename from tests/integration/recordings/responses/ec4853ce509b.json rename to tests/integration/recordings/responses/1dd3641034a3.json index 5456514ab..c96d20036 100644 --- a/tests/integration/recordings/responses/ec4853ce509b.json +++ b/tests/integration/recordings/responses/1dd3641034a3.json @@ -15,7 +15,7 @@ "content": "What is the boiling point of the liquid polyjuice in celsius?" } ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "tool_choice": "auto", @@ -55,7 +55,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-709", + "id": "chatcmpl-9", "choices": [ { "delta": { @@ -66,7 +66,7 @@ "tool_calls": [ { "index": 0, - "id": "call_3wa5qjdc", + "id": "call_88k1yds9", "function": { "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", "name": "get_boiling_point" @@ -80,7 +80,7 @@ "logprobs": null } ], - "created": 1759368374, + "created": 1759427014, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -91,7 +91,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-709", + "id": "chatcmpl-9", "choices": [ { "delta": { @@ -106,7 +106,7 @@ "logprobs": null } ], - "created": 1759368374, + "created": 1759427014, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/41b2727ebdec.json b/tests/integration/recordings/responses/41b2727ebdec.json new file mode 100644 index 000000000..c90c83414 --- /dev/null +++ b/tests/integration/recordings/responses/41b2727ebdec.json @@ -0,0 +1,16449 @@ +{ + "request": { + "method": "POST", + "url": "https://api.fireworks.ai/inference/v1/v1/embeddings", + "headers": {}, + "body": { + "model": 
"accounts/fireworks/models/qwen3-embedding-8b", + "input": [ + "Python is a high-level programming language that emphasizes code readability and allows programmers to express concepts in fewer lines of code than would be possible in languages such as C++ or Java.", + "Machine learning is a subset of artificial intelligence that enables systems to automatically learn and improve from experience without being explicitly programmed, using statistical techniques to give computer systems the ability to progressively improve performance on a specific task.", + "Data structures are fundamental to computer science because they provide organized ways to store and access data efficiently, enable faster processing of data through optimized algorithms, and form the building blocks for more complex software systems.", + "Neural networks are inspired by biological neural networks found in animal brains, using interconnected nodes called artificial neurons to process information through weighted connections that can be trained to recognize patterns and solve complex problems through iterative learning." 
+ ], + "encoding_format": "float" + }, + "endpoint": "/v1/embeddings", + "model": "accounts/fireworks/models/qwen3-embedding-8b" + }, + "response": { + "body": { + "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse", + "__data__": { + "data": [ + { + "embedding": [ + 0.8984375, + 3.71875, + -2.40625, + 1.4296875, + 3.96875, + -4.8125, + -3.578125, + 2.421875, + 5.25, + -2.65625, + -5.8125, + 2.296875, + 3.03125, + -0.173828125, + 8.5625, + 4.8125, + 3.265625, + -2.765625, + 0.875, + -1.3984375, + -6.03125, + 6.1875, + 4.59375, + 2.5, + -1.296875, + -0.365234375, + -2.15625, + 1.5859375, + 1.46875, + -0.474609375, + -0.71875, + 3.953125, + 2.34375, + 4.34375, + -1.375, + 2.484375, + -2.65625, + -2.3125, + 1.1015625, + 3.21875, + -1.5703125, + -1.6328125, + -2.234375, + 0.82421875, + 1.859375, + -0.41015625, + -4.75, + -1.71875, + -0.1376953125, + 2.28125, + -4.09375, + -0.1484375, + 0.65234375, + 1.4609375, + -1.921875, + 1.2734375, + 1.640625, + -2.71875, + -0.93359375, + -3.53125, + -0.384765625, + 0.81640625, + 2.40625, + -1.0859375, + -1.3046875, + 1.0859375, + -0.68359375, + -1.609375, + -0.462890625, + 0.50390625, + 0.609375, + -2.203125, + -0.326171875, + -1.2421875, + -2.765625, + 2.078125, + -2.03125, + -0.93359375, + 3.375, + 3.25, + 0.59375, + -2.296875, + -1.890625, + -1.7109375, + -0.6171875, + -2.109375, + -0.6796875, + -1.7265625, + -0.181640625, + -0.25390625, + -2.609375, + 1.9609375, + 1.8515625, + 0.36328125, + 0.1552734375, + -5.34375, + 1.2265625, + -0.072265625, + 0.1806640625, + 1.6640625, + 4.625, + 0.6796875, + 2.328125, + 3.640625, + 1.4765625, + -0.023193359375, + 1.7890625, + -2.90625, + -0.625, + 0.43359375, + -0.2109375, + 4.09375, + 1.84375, + 2.15625, + -1.5703125, + -1.2421875, + -0.1611328125, + -1.046875, + 0.71875, + 0.5, + 0.5703125, + -2.125, + 0.087890625, + -0.9609375, + 2.046875, + -5.65625, + -0.423828125, + 0.1513671875, + -3.921875, + -0.3046875, + -0.2470703125, + 0.984375, + -2.890625, + -2.8125, 
+ -2.90625, + -0.70703125, + 1.59375, + 1.1640625, + -3.359375, + 2.15625, + -2.828125, + -0.52734375, + 1.703125, + -1.0546875, + 1.0, + 0.96484375, + -2.96875, + -0.185546875, + -0.1826171875, + -0.48046875, + 1.4765625, + 1.5859375, + 2.28125, + -0.82421875, + -0.49609375, + -2.109375, + 2.015625, + 0.059814453125, + 1.15625, + 1.671875, + -3.984375, + -0.1552734375, + 0.197265625, + 2.21875, + 2.75, + -1.234375, + 4.09375, + -1.3359375, + 2.453125, + -3.34375, + 1.109375, + -0.20703125, + -2.515625, + 3.0625, + 1.6171875, + 0.98828125, + 4.34375, + -0.8515625, + -0.5078125, + 1.1875, + 1.96875, + 5.90625, + 0.828125, + 0.5703125, + 0.08349609375, + -1.3671875, + -1.625, + -0.10546875, + 2.140625, + 2.4375, + -0.404296875, + -1.59375, + 0.890625, + -2.3125, + 0.8671875, + -0.67578125, + -0.9921875, + -2.015625, + 1.015625, + 0.9140625, + 4.71875, + -4.34375, + 2.046875, + -0.4921875, + 0.25390625, + -0.8984375, + -0.62890625, + 2.234375, + -1.265625, + -4.25, + 1.7265625, + -0.953125, + 1.3671875, + 3.765625, + 1.6875, + 2.0625, + 0.48828125, + 0.8359375, + -0.62890625, + 1.7109375, + -0.1875, + 4.3125, + -2.21875, + 0.58203125, + -3.203125, + -0.80859375, + 0.474609375, + -1.546875, + 0.98828125, + -2.875, + 4.09375, + -4.96875, + -0.390625, + -0.296875, + 3.5, + -2.40625, + -2.578125, + -0.224609375, + 1.359375, + 0.1318359375, + -0.578125, + -1.546875, + -0.62890625, + 2.703125, + 0.68359375, + 1.6484375, + 3.65625, + 1.8828125, + 1.4765625, + 0.96484375, + -1.5234375, + -1.0546875, + 0.220703125, + -0.08740234375, + -0.458984375, + -1.140625, + -1.84375, + -1.375, + 0.0693359375, + 0.88671875, + 0.7578125, + -0.8046875, + -0.64453125, + -0.337890625, + 0.41796875, + -0.671875, + -4.46875, + -3.203125, + -1.3203125, + 1.125, + 1.1953125, + 0.01123046875, + -1.890625, + 1.015625, + 0.5, + 3.84375, + 1.6484375, + -2.40625, + -5.71875, + -1.75, + -3.0, + 2.296875, + 1.28125, + -2.03125, + 1.34375, + -1.25, + -0.12451171875, + 1.4453125, + -1.53125, + -0.3046875, 
+ -2.59375, + -0.205078125, + -0.73046875, + 2.640625, + -2.203125, + -2.84375, + -3.640625, + -1.6328125, + 0.88671875, + 1.6875, + 0.69140625, + 1.4296875, + 2.6875, + -2.359375, + -3.21875, + 2.328125, + 2.84375, + 0.06982421875, + -0.8359375, + -2.609375, + -2.0625, + 2.328125, + -1.6875, + 0.486328125, + 0.90234375, + -3.421875, + 1.9921875, + 1.1171875, + -2.796875, + 0.85546875, + -2.015625, + 0.392578125, + -0.36328125, + 0.859375, + -1.3671875, + 2.6875, + -0.765625, + -0.6640625, + 0.51171875, + 2.3125, + 0.279296875, + -1.2890625, + -2.515625, + 2.921875, + -1.5, + 0.953125, + 1.3359375, + -2.40625, + 3.71875, + 0.625, + -1.9609375, + 0.23828125, + 1.2734375, + 1.03125, + 0.0223388671875, + -0.96484375, + -2.734375, + 2.984375, + -2.46875, + -1.6640625, + -0.75, + 0.78125, + 0.71875, + -1.78125, + 2.125, + -1.8359375, + -1.046875, + 0.60546875, + 4.3125, + -0.71484375, + -0.8046875, + 0.79296875, + 1.4375, + -0.1669921875, + -0.025390625, + 4.375, + 1.9453125, + -1.796875, + -2.703125, + -0.1513671875, + -1.1171875, + 1.9453125, + 1.9375, + -0.291015625, + 2.96875, + 0.478515625, + 2.84375, + 0.921875, + 2.5625, + 2.5625, + 2.0625, + -8.0625, + 1.0078125, + -2.0, + -1.125, + 2.140625, + -1.3046875, + -1.0859375, + -0.2392578125, + -3.890625, + -4.21875, + -1.015625, + 3.234375, + 2.109375, + -3.546875, + -1.234375, + 0.474609375, + 0.87890625, + 1.3046875, + -1.1640625, + 1.9453125, + -0.1083984375, + -0.2392578125, + 0.341796875, + 0.53125, + 2.5625, + 0.71484375, + 0.58203125, + 0.04296875, + 0.6328125, + 0.6171875, + 0.275390625, + 0.62109375, + 0.1728515625, + 2.671875, + 0.98046875, + 0.017578125, + 3.796875, + -5.125, + -1.2265625, + -1.1796875, + 0.64453125, + 1.1171875, + -2.078125, + -1.6484375, + 1.59375, + -3.359375, + -2.6875, + 1.546875, + -2.90625, + 1.09375, + 0.92578125, + -3.03125, + -0.05859375, + -3.25, + 0.384765625, + -1.203125, + 0.80078125, + -0.37890625, + -0.59765625, + -1.1171875, + 0.67578125, + 3.890625, + 0.38671875, + 
-2.96875, + -2.40625, + -0.25390625, + -1.171875, + -1.6796875, + -2.40625, + -1.6875, + 2.15625, + -1.234375, + -0.875, + 1.796875, + -2.640625, + -3.21875, + -1.4609375, + -0.86328125, + 0.859375, + -1.8828125, + -2.0625, + 1.2265625, + 2.46875, + 0.05419921875, + -1.2890625, + 1.765625, + 1.6328125, + -4.125, + 3.140625, + -1.21875, + -3.140625, + 0.421875, + -2.71875, + 2.53125, + 2.921875, + -0.93359375, + 0.244140625, + -0.298828125, + -1.40625, + 0.0115966796875, + -0.7890625, + -1.5234375, + -8.5, + 1.6171875, + 3.1875, + -0.68359375, + -6.3125, + -1.7109375, + 1.3984375, + 0.275390625, + -1.5625, + 0.765625, + 1.3359375, + -0.470703125, + -3.203125, + -1.7109375, + 0.80078125, + 0.94140625, + 2.375, + 0.166015625, + -0.87109375, + -3.0, + 1.0625, + -0.9609375, + -1.171875, + 3.515625, + -3.109375, + 0.953125, + 3.984375, + 4.0625, + -0.431640625, + -3.21875, + 0.9375, + 0.1796875, + -0.94140625, + 1.6171875, + -1.515625, + -1.71875, + -1.015625, + 2.015625, + -2.46875, + 0.66015625, + 2.90625, + 2.25, + -0.48046875, + -1.0546875, + -1.0546875, + 3.484375, + 1.8359375, + -1.28125, + -0.9375, + -1.625, + -4.25, + 0.447265625, + 0.6875, + -0.1904296875, + -1.2421875, + 1.15625, + -1.703125, + 0.74609375, + 2.5625, + 0.1318359375, + 0.58203125, + 0.0517578125, + 0.83984375, + -1.3515625, + 0.53515625, + 0.8203125, + 0.796875, + 2.21875, + 2.359375, + 2.453125, + 1.59375, + -1.2890625, + 1.046875, + -2.1875, + -0.1962890625, + 1.796875, + -0.234375, + -2.25, + 0.8671875, + -0.80859375, + 1.3984375, + -2.5, + 1.484375, + -1.8828125, + -2.3125, + -1.3359375, + 0.07421875, + -2.078125, + 0.734375, + -1.5, + -1.234375, + -0.3828125, + -1.6328125, + -1.8515625, + 0.59765625, + -1.4296875, + -5.9375, + -2.0625, + -0.26171875, + 0.7890625, + -1.046875, + 1.59375, + -1.2734375, + -3.0625, + -1.234375, + 0.70703125, + 0.291015625, + 1.046875, + 0.29296875, + 3.484375, + -0.07763671875, + -1.671875, + -2.84375, + -1.1171875, + -1.046875, + 2.40625, + 1.6015625, + 
-1.3203125, + -4.0, + 1.4296875, + 1.0546875, + -0.3984375, + 1.3046875, + -1.9375, + 0.52734375, + 2.8125, + -4.53125, + 5.0, + -0.2412109375, + -1.4375, + 0.2421875, + -0.78515625, + -4.25, + 3.3125, + 0.6484375, + 1.0625, + -2.59375, + -3.453125, + -3.296875, + -0.62109375, + -0.75, + 1.1171875, + 0.74609375, + -0.7578125, + 1.140625, + 0.9140625, + -0.062255859375, + -2.109375, + 3.3125, + -0.50390625, + -0.89453125, + -1.9609375, + -2.03125, + -1.0859375, + 1.28125, + -1.5, + 4.875, + 1.0625, + 1.03125, + 0.578125, + 0.8984375, + 1.6484375, + 4.625, + 1.046875, + 1.875, + 0.91796875, + -1.3515625, + -2.921875, + -0.91796875, + -1.53125, + -1.4296875, + -3.796875, + 0.9765625, + 2.28125, + 0.10009765625, + -6.5625, + -0.3359375, + -1.28125, + -1.25, + 0.248046875, + 1.234375, + 3.84375, + -0.279296875, + -0.47265625, + -0.1494140625, + 3.484375, + -0.9140625, + 0.453125, + -0.341796875, + -2.3125, + -1.1953125, + 0.43359375, + 3.1875, + 1.2109375, + 2.21875, + 0.84765625, + -4.5, + 1.625, + 1.78125, + -1.5546875, + -0.0201416015625, + 4.75, + -3.15625, + 2.1875, + -0.61328125, + 1.78125, + -1.7734375, + 1.015625, + -2.875, + 0.357421875, + -1.078125, + 0.11083984375, + 1.515625, + -0.08203125, + -1.9375, + -1.0859375, + 4.15625, + 2.4375, + 0.765625, + -0.53515625, + -4.9375, + -1.1171875, + -2.78125, + 0.4140625, + -0.33203125, + -1.984375, + 0.291015625, + -3.21875, + 0.671875, + -3.09375, + -0.6875, + -0.2119140625, + 0.37109375, + 2.359375, + 2.875, + -4.3125, + 1.1484375, + 1.578125, + 3.875, + 1.53125, + -0.9453125, + 2.015625, + -2.1875, + 4.875, + -0.57421875, + 1.4609375, + -1.8359375, + -2.671875, + 0.609375, + -0.458984375, + 0.80859375, + -1.3203125, + 2.09375, + -4.28125, + -0.1640625, + -0.08984375, + 1.7734375, + -1.578125, + -0.142578125, + -0.3828125, + 0.34765625, + -1.515625, + -2.046875, + -0.03759765625, + -0.890625, + 0.8828125, + 0.369140625, + 1.09375, + 1.09375, + -3.890625, + 0.6328125, + -0.9140625, + -0.3515625, + 0.51953125, + 
-1.3671875, + 3.390625, + -2.359375, + -1.6875, + 3.421875, + 0.232421875, + -1.953125, + 4.625, + 2.046875, + -1.59375, + 0.69921875, + 0.400390625, + -2.953125, + 0.1572265625, + -4.15625, + -1.375, + -0.349609375, + 4.46875, + -0.423828125, + -0.97265625, + 0.97265625, + -0.66796875, + 1.734375, + -1.109375, + -0.1943359375, + -0.40625, + 1.484375, + 0.0927734375, + 3.1875, + -4.96875, + 1.2421875, + 1.2734375, + 2.59375, + -1.8046875, + 1.1953125, + 1.75, + 1.3203125, + 1.3046875, + 1.0625, + -1.3046875, + 1.4765625, + -0.5078125, + 6.09375, + 0.578125, + -0.13671875, + -0.18359375, + 0.03466796875, + 1.6015625, + 0.06787109375, + 2.984375, + -1.1015625, + 4.03125, + 10.0625, + -4.09375, + 3.484375, + -1.4140625, + 1.8515625, + -1.5625, + -2.0625, + -1.5078125, + -0.06591796875, + -1.75, + 0.498046875, + -1.5625, + -2.40625, + 2.140625, + 2.484375, + -0.302734375, + -1.5625, + 0.349609375, + 0.296875, + 3.0, + 1.5703125, + -1.1875, + -1.625, + -1.6015625, + 1.03125, + 1.1640625, + -0.703125, + 3.28125, + -2.140625, + 0.0228271484375, + 2.4375, + -2.40625, + 1.5234375, + 1.0859375, + -4.4375, + -3.4375, + 0.1708984375, + -2.40625, + -0.181640625, + 0.189453125, + 0.01141357421875, + -0.8359375, + -2.875, + -1.890625, + 0.296875, + -3.484375, + 0.0289306640625, + 2.609375, + 0.98046875, + -2.5625, + -1.875, + 0.421875, + 1.109375, + -2.046875, + -2.046875, + 0.421875, + -1.265625, + -0.11572265625, + 2.109375, + 1.625, + -1.6484375, + -0.73046875, + 0.275390625, + 3.15625, + -0.6171875, + 2.578125, + -0.1796875, + 7.3125, + 1.046875, + -0.0054931640625, + -2.734375, + 0.99609375, + 0.48046875, + -0.2177734375, + 2.171875, + 0.0634765625, + 2.90625, + 1.2109375, + 2.296875, + 1.25, + 2.046875, + -2.046875, + -3.5625, + -0.69921875, + 0.7109375, + -2.625, + -0.84765625, + -3.59375, + 0.4296875, + -0.96875, + -2.5625, + -1.0078125, + 1.484375, + -0.1005859375, + 1.8984375, + -1.75, + 1.484375, + -1.5703125, + -1.203125, + -1.7109375, + -1.5234375, + 1.265625, + 
0.15625, + 2.15625, + -0.84765625, + -0.2490234375, + 3.171875, + -2.84375, + -1.4140625, + -2.96875, + -1.6875, + 0.70703125, + 0.90234375, + -2.921875, + 0.91796875, + 0.7265625, + 1.609375, + 1.7265625, + -2.125, + -0.61328125, + -0.392578125, + 1.78125, + -1.28125, + 1.484375, + 2.015625, + 0.41796875, + 0.46484375, + -0.53125, + 0.1943359375, + 1.5234375, + 0.25, + -0.490234375, + -2.03125, + 3.015625, + -0.037109375, + -4.25, + -1.7734375, + -0.8515625, + -2.421875, + 0.859375, + -2.140625, + 0.15234375, + -2.421875, + -1.1796875, + 3.0625, + 1.171875, + 0.68359375, + -0.5703125, + -1.8515625, + -2.703125, + 0.384765625, + 2.03125, + -0.48046875, + 3.203125, + 3.09375, + 0.08154296875, + -1.046875, + 0.3125, + -0.59765625, + -4.125, + -0.14453125, + -1.515625, + -0.1953125, + 1.6328125, + 0.212890625, + -1.4375, + 0.50390625, + 1.578125, + -0.9921875, + -1.75, + 0.94140625, + 0.76953125, + 1.9140625, + 0.306640625, + 1.78125, + 2.359375, + 3.703125, + 2.234375, + -1.34375, + -5.1875, + 14.0625, + -1.8515625, + 2.28125, + 2.125, + 1.75, + 2.875, + -1.578125, + 0.99609375, + -0.08544921875, + 1.8828125, + 0.703125, + 0.56640625, + -0.75, + -1.671875, + 1.5625, + 2.390625, + -0.1826171875, + -0.443359375, + -0.248046875, + 2.078125, + -3.75, + 0.58203125, + -1.9375, + -0.267578125, + 1.625, + -0.169921875, + -2.5625, + -1.5859375, + 0.91015625, + -4.03125, + -1.6953125, + 0.8125, + -0.875, + 0.06640625, + -3.09375, + 2.828125, + -3.296875, + -2.265625, + -2.0, + -0.83203125, + -0.2412109375, + 1.5703125, + -1.5546875, + -0.85546875, + -0.88671875, + 3.453125, + 1.2890625, + 4.34375, + 0.1357421875, + -0.5, + -1.375, + 2.015625, + -2.234375, + -2.703125, + 1.5703125, + -1.1953125, + -1.5078125, + 0.0625, + 0.35546875, + -2.15625, + -2.375, + -1.2734375, + -1.9609375, + 3.03125, + 1.4453125, + -0.150390625, + 1.21875, + 2.890625, + 1.09375, + -0.04296875, + 0.42578125, + 4.15625, + -0.2412109375, + 1.6171875, + -0.9765625, + -1.5546875, + -0.431640625, + 
0.126953125, + -1.53125, + -3.484375, + 2.5, + 2.125, + 0.10546875, + -1.1484375, + -0.11669921875, + 1.7578125, + 3.53125, + -0.71484375, + -1.3046875, + -1.1171875, + -1.1875, + 1.4765625, + -0.65625, + 1.984375, + -1.84375, + 3.046875, + 2.78125, + 1.3203125, + -0.4296875, + 0.50390625, + -0.267578125, + 0.078125, + -1.578125, + -1.59375, + 1.5078125, + -0.52734375, + -0.0703125, + 0.55078125, + -2.046875, + 0.4296875, + 3.359375, + -1.2890625, + -0.90625, + 1.671875, + 0.90234375, + -0.326171875, + -1.5, + 0.005615234375, + -1.6640625, + -1.7890625, + -1.75, + -0.6875, + 0.515625, + -1.734375, + -0.78125, + 2.59375, + -0.7109375, + 2.796875, + 1.1640625, + 0.0196533203125, + 2.234375, + -2.21875, + 0.87109375, + 0.3359375, + -0.265625, + -4.59375, + -2.078125, + 0.515625, + -1.484375, + -2.5625, + -5.125, + 0.076171875, + 1.9296875, + -0.64453125, + 1.0703125, + 3.125, + -1.9375, + 1.7734375, + 3.421875, + 3.484375, + -1.1015625, + 0.265625, + 1.015625, + 0.546875, + -0.9609375, + 0.13671875, + 1.1484375, + 1.328125, + -1.9609375, + -1.890625, + 0.6796875, + -0.306640625, + -2.390625, + 0.056640625, + -0.51953125, + 2.6875, + 0.72265625, + 5.53125, + -2.40625, + -1.53125, + 0.56640625, + 1.3828125, + 1.1171875, + 0.66796875, + -2.828125, + 2.03125, + 2.171875, + -0.10791015625, + 2.34375, + -1.0078125, + 0.8671875, + 2.09375, + -0.318359375, + -0.267578125, + 0.419921875, + -0.73046875, + 2.171875, + -0.64453125, + 0.41015625, + -0.546875, + 2.90625, + 0.458984375, + -5.09375, + 1.6015625, + 2.03125, + 1.15625, + -5.0, + -1.34375, + 1.3984375, + 4.1875, + -1.4296875, + -1.2265625, + -2.421875, + 0.93359375, + -0.474609375, + 0.04541015625, + -0.41015625, + 0.140625, + -4.96875, + 1.703125, + 1.4921875, + 0.365234375, + 4.0625, + -1.3984375, + 2.921875, + -1.3359375, + 1.375, + 2.125, + -2.703125, + -0.76171875, + -3.40625, + -0.216796875, + -0.859375, + -1.6171875, + -2.09375, + -1.484375, + 0.921875, + -1.0625, + 2.5625, + -1.0, + 2.125, + 1.25, + 0.6328125, + 
4.9375, + -2.15625, + 0.78515625, + -0.46875, + 0.82421875, + 2.75, + 0.6171875, + 2.640625, + -1.5546875, + 0.83984375, + -1.0859375, + 2.265625, + 2.140625, + -1.7578125, + -1.53125, + -1.671875, + 0.5390625, + -4.125, + -0.52734375, + -1.9375, + -2.15625, + -0.65234375, + -0.203125, + 0.9453125, + 1.5234375, + 0.92578125, + 1.3671875, + -4.28125, + -2.0625, + 0.640625, + -1.515625, + 1.0234375, + 0.1865234375, + -0.63671875, + -0.828125, + -2.359375, + -0.65625, + -1.4140625, + 0.451171875, + -2.640625, + -0.70703125, + 1.203125, + -0.34765625, + 3.921875, + -0.2890625, + 0.1650390625, + -1.28125, + -1.4296875, + 1.1953125, + -0.06201171875, + 0.359375, + 3.921875, + 1.1796875, + 3.90625, + 2.515625, + 0.33203125, + 1.796875, + 0.53125, + 3.15625, + 0.69140625, + -1.2890625, + -0.1201171875, + -3.078125, + -0.6171875, + 4.15625, + -0.095703125, + 0.609375, + -0.251953125, + 1.7890625, + -0.259765625, + 0.921875, + -1.4453125, + 1.4765625, + -0.62890625, + -0.90625, + 3.75, + 2.578125, + -0.4609375, + 2.015625, + 0.396484375, + -1.703125, + 2.515625, + 0.27734375, + 0.490234375, + -0.0263671875, + 1.34375, + -2.296875, + -2.875, + 2.875, + 2.765625, + 3.703125, + -0.984375, + -1.265625, + -1.1484375, + 1.5625, + 2.65625, + -0.91015625, + 2.140625, + -0.181640625, + 0.296875, + -0.54296875, + -2.09375, + -2.1875, + 1.9453125, + -2.53125, + -0.16015625, + -0.2265625, + -1.9375, + 0.1142578125, + -0.66796875, + -4.40625, + 2.265625, + -4.40625, + -1.015625, + -1.203125, + -1.0078125, + -2.109375, + 2.234375, + -1.140625, + 2.671875, + 1.671875, + -1.1171875, + -2.5, + 2.28125, + -0.51953125, + -2.515625, + 5.0625, + -4.71875, + -3.671875, + -0.75390625, + -2.21875, + 2.0, + 0.71875, + 1.0234375, + 0.83203125, + -2.171875, + -1.7109375, + 1.3828125, + -3.15625, + -1.125, + -0.8515625, + 1.0234375, + 0.3515625, + -1.390625, + 1.21875, + 2.46875, + -0.60546875, + -0.3984375, + -0.8984375, + -2.53125, + -2.265625, + -1.390625, + -0.47265625, + -0.0849609375, + 
-2.109375, + 1.8203125, + 2.078125, + 0.859375, + -1.4609375, + 0.1591796875, + 3.796875, + -2.6875, + 1.0546875, + -1.5234375, + 3.03125, + -1.25, + -0.921875, + -1.1171875, + -0.296875, + -1.8828125, + -2.671875, + 2.8125, + -1.7578125, + 0.0235595703125, + 1.1015625, + 0.1484375, + -0.5234375, + -1.8125, + 0.9609375, + -4.34375, + 1.8203125, + 1.8984375, + -0.50390625, + -1.15625, + 1.3203125, + -1.9296875, + 2.109375, + 0.26171875, + 0.3828125, + 1.109375, + 0.3046875, + 1.1015625, + 0.201171875, + -1.3671875, + 3.046875, + -2.109375, + 0.11767578125, + -0.88671875, + -1.5390625, + -4.15625, + 1.15625, + 1.625, + 2.421875, + -0.44140625, + 0.91015625, + -5.40625, + -2.484375, + -3.734375, + 0.8046875, + -0.1796875, + 2.21875, + 1.0703125, + 0.027099609375, + -0.37890625, + 0.412109375, + 2.921875, + 1.5859375, + 0.1259765625, + -0.5625, + -2.9375, + 3.90625, + -1.359375, + 1.5234375, + -2.390625, + 1.4765625, + 1.0234375, + -0.73828125, + -1.4609375, + -0.5234375, + 0.412109375, + -0.94140625, + -2.1875, + -0.59375, + 1.1015625, + 0.11376953125, + -1.1875, + 1.7265625, + 3.265625, + 0.236328125, + 3.328125, + -0.01055908203125, + -2.015625, + 3.234375, + -2.921875, + -5.125, + 1.65625, + 3.15625, + -1.0, + 2.828125, + -0.0771484375, + -1.9140625, + -2.875, + 3.453125, + 3.421875, + -1.375, + -0.34765625, + -1.640625, + 1.1640625, + 1.0234375, + 1.8671875, + 0.1474609375, + -2.21875, + 1.4609375, + 1.984375, + 0.97265625, + 0.0174560546875, + -1.75, + -1.1484375, + -0.71484375, + -1.6015625, + -2.578125, + 0.84375, + 3.046875, + 1.1328125, + -1.6171875, + 0.01025390625, + 0.54296875, + -0.9921875, + -4.5625, + -0.04638671875, + 1.1796875, + -1.40625, + -1.34375, + 0.2119140625, + -5.625, + 0.65625, + -1.375, + 0.59375, + 2.84375, + 0.058349609375, + 0.0712890625, + -0.7890625, + 0.357421875, + 1.625, + 1.6015625, + -1.984375, + 1.34375, + -1.765625, + 1.7734375, + 1.234375, + 0.69921875, + -2.8125, + 3.03125, + -1.015625, + -2.3125, + 3.53125, + 1.890625, + 
-2.546875, + -0.8515625, + 0.2001953125, + -1.9921875, + 0.2080078125, + 1.4609375, + 3.28125, + 1.4296875, + -0.6796875, + 0.37109375, + 4.125, + 0.7734375, + 0.98046875, + -0.314453125, + 0.5078125, + 0.671875, + 1.7578125, + 2.59375, + -5.15625, + -0.73046875, + -0.57421875, + 0.8359375, + 0.2158203125, + 4.6875, + -0.036376953125, + 0.59375, + -2.546875, + -2.125, + 0.65234375, + 0.7265625, + -2.390625, + 1.25, + -1.3671875, + -0.6953125, + 0.51171875, + 0.91015625, + 2.84375, + -2.75, + 1.84375, + 2.84375, + -1.03125, + -1.3203125, + -2.84375, + 0.49609375, + 3.578125, + 0.39453125, + 1.0078125, + -1.40625, + -3.265625, + 2.90625, + 2.828125, + -1.015625, + -2.28125, + -0.244140625, + 1.0078125, + -1.125, + 4.75, + -0.95703125, + -2.03125, + 0.01123046875, + 0.67578125, + -1.6328125, + 1.390625, + -0.6875, + 4.6875, + 0.8125, + 0.4609375, + -3.90625, + -0.046142578125, + 1.984375, + -0.439453125, + 3.484375, + -0.45703125, + -1.625, + 0.78125, + 2.203125, + 0.93359375, + 2.5625, + 1.2421875, + -0.6796875, + -0.71484375, + 2.625, + -2.140625, + 0.91015625, + 1.3046875, + 4.25, + -0.90625, + 1.875, + -2.421875, + 1.9375, + -0.9453125, + -0.94921875, + -0.546875, + -0.416015625, + -1.6796875, + 3.09375, + 0.63671875, + -2.0, + -1.4765625, + -1.046875, + -0.60546875, + -0.8671875, + -0.1767578125, + -3.421875, + -0.35546875, + 2.671875, + 1.078125, + -0.2392578125, + -1.390625, + 2.953125, + 3.65625, + -0.2373046875, + 2.234375, + 0.45703125, + 0.625, + -0.37109375, + 0.443359375, + 0.11767578125, + 1.421875, + -1.5546875, + 3.640625, + -1.75, + 0.796875, + 5.21875, + 0.77734375, + -1.3203125, + -0.6328125, + -0.2333984375, + -0.84375, + -0.06689453125, + -4.6875, + -1.3671875, + -1.0859375, + -0.79296875, + -0.98046875, + 0.66796875, + -0.1259765625, + -0.7265625, + 2.90625, + -0.4765625, + 1.921875, + 0.365234375, + 1.328125, + -5.21875, + -0.3515625, + 2.0625, + -1.0546875, + 2.734375, + -1.546875, + 0.87109375, + 1.1953125, + 0.0128173828125, + 2.390625, + 
0.412109375, + 1.4765625, + 0.99609375, + -3.171875, + 0.20703125, + -1.0625, + -2.828125, + 1.859375, + -0.08935546875, + 0.96484375, + -1.5078125, + 1.234375, + -3.5, + 5.6875, + 0.78125, + -0.1318359375, + -0.169921875, + -1.6953125, + -1.6875, + 1.6796875, + 0.1435546875, + 0.005859375, + -0.6953125, + 0.302734375, + 0.875, + -0.1435546875, + -0.82421875, + -0.66015625, + -0.41796875, + 2.234375, + 2.171875, + -2.0625, + -0.89453125, + -3.515625, + 0.330078125, + 3.921875, + 0.5859375, + -5.125, + -2.0625, + 0.384765625, + 1.3046875, + -1.6171875, + -1.09375, + -0.79296875, + -0.41796875, + -0.9375, + 0.21875, + 1.5078125, + -3.296875, + -1.28125, + -0.796875, + -1.4296875, + -2.921875, + 1.1171875, + -1.640625, + -1.265625, + -1.8671875, + 1.078125, + -1.046875, + -1.75, + -1.0546875, + -1.359375, + 0.51171875, + 0.58984375, + 1.7109375, + 2.59375, + -0.376953125, + -3.0, + 3.296875, + 1.6953125, + -0.376953125, + -2.40625, + 2.25, + -1.3828125, + -0.171875, + -0.265625, + 0.0732421875, + -2.078125, + 2.21875, + 2.015625, + -4.15625, + 1.46875, + -0.52734375, + 1.9140625, + 2.15625, + 3.953125, + 0.482421875, + 2.78125, + 0.61328125, + 1.1171875, + 1.3203125, + -1.828125, + -0.58203125, + -2.140625, + -0.92578125, + 0.328125, + -1.625, + -2.015625, + -3.046875, + -1.90625, + 2.34375, + -10.5625, + 0.2119140625, + -0.79296875, + 0.42578125, + -1.2890625, + -0.453125, + 2.453125, + 1.3671875, + -2.90625, + -1.421875, + 3.984375, + -4.40625, + 0.8828125, + -0.3046875, + 3.4375, + 0.34765625, + 1.5859375, + -0.1279296875, + 1.765625, + -3.28125, + -0.578125, + 2.140625, + 1.1875, + 0.255859375, + -0.703125, + 0.328125, + 3.53125, + -0.66015625, + 0.92578125, + -6.125, + -1.6953125, + 1.0859375, + 2.28125, + 1.375, + 2.140625, + 0.203125, + -1.0546875, + -2.390625, + 0.40625, + 2.484375, + -0.62890625, + -0.10986328125, + -1.8671875, + -1.15625, + 0.1904296875, + -2.828125, + -1.4765625, + -2.609375, + -2.5, + 1.6328125, + -0.5546875, + -1.1484375, + 5.59375, + 
1.203125, + -6.0, + 0.00011682510375976562, + -1.2265625, + 0.1435546875, + -0.53515625, + 1.265625, + -0.66015625, + -0.6328125, + -0.08544921875, + -0.26171875, + 0.216796875, + 1.0, + -1.8984375, + 1.8515625, + 0.0123291015625, + 0.2734375, + -1.7421875, + 1.8984375, + 0.796875, + 0.52734375, + -1.9140625, + 0.1259765625, + -0.59375, + 0.640625, + 4.3125, + -0.56640625, + -0.64453125, + 1.375, + 0.71875, + 1.140625, + 0.5703125, + -3.5, + -1.6015625, + 1.015625, + 1.1640625, + -1.53125, + 1.609375, + 1.8984375, + -1.5703125, + -0.1416015625, + -4.8125, + -0.326171875, + -0.283203125, + 1.1484375, + 2.46875, + -4.4375, + -0.61328125, + -0.154296875, + 1.484375, + 0.21875, + -2.0, + 0.625, + 13.0, + -1.453125, + -1.65625, + 0.73046875, + 1.84375, + 0.28125, + -1.3515625, + -3.125, + -3.5, + -0.73046875, + -0.60546875, + 0.87109375, + -2.046875, + 0.51953125, + -0.373046875, + -3.65625, + 3.5, + -2.0625, + 3.0, + 2.75, + -1.9765625, + 0.140625, + -3.171875, + 0.796875, + 2.46875, + 0.8046875, + 0.85546875, + 2.078125, + -1.2265625, + -1.0234375, + 1.828125, + -0.26171875, + -0.08740234375, + -1.7578125, + 0.9140625, + 0.1435546875, + 2.046875, + 1.296875, + -1.7421875, + 0.1689453125, + -3.9375, + -1.390625, + 0.890625, + -0.66015625, + 1.3046875, + -0.87890625, + -3.4375, + 2.109375, + 0.154296875, + 0.81640625, + 2.0, + -1.03125, + 1.8125, + 1.3828125, + -4.34375, + -0.01080322265625, + 0.76171875, + 1.9140625, + 2.296875, + -0.41015625, + 2.046875, + 1.515625, + 3.046875, + -1.078125, + -2.65625, + 2.953125, + -1.4765625, + -1.9140625, + 0.64453125, + 0.5859375, + -0.71875, + -0.11962890625, + 3.015625, + 0.9609375, + 3.046875, + 1.953125, + 0.076171875, + -1.8671875, + 1.3046875, + -0.63671875, + 0.435546875, + -0.921875, + 0.77734375, + -0.37109375, + 1.1328125, + 0.41015625, + 0.02685546875, + -0.4296875, + 0.482421875, + -0.53515625, + -0.59765625, + 0.10546875, + 1.2109375, + 0.7734375, + 2.015625, + -0.9375, + 0.0169677734375, + 0.66796875, + 
-0.06787109375, + 0.53125, + -1.65625, + 1.3125, + -1.421875, + -0.3515625, + 1.1015625, + 0.54296875, + -4.125, + -4.65625, + 3.859375, + -1.7421875, + 0.376953125, + -0.1513671875, + 0.279296875, + -2.4375, + 0.271484375, + 0.037841796875, + 6.0, + 1.4453125, + 0.11279296875, + 2.59375, + -3.15625, + 0.921875, + -0.8359375, + -2.65625, + -2.0625, + -3.109375, + 1.6875, + -1.8125, + 2.046875, + -0.1455078125, + -0.83984375, + 1.203125, + 2.9375, + -0.64453125, + -0.314453125, + -1.046875, + 2.453125, + -0.1396484375, + -0.76953125, + 0.359375, + 0.85546875, + -0.5078125, + -1.765625, + 0.46875, + 0.365234375, + 1.953125, + -1.6640625, + -2.484375, + -1.7578125, + -1.1328125, + 2.296875, + -0.05712890625, + 0.028076171875, + 3.515625, + -2.390625, + 3.390625, + -1.1015625, + 0.96484375, + -0.119140625, + -2.515625, + 2.328125, + 0.43359375, + -1.2265625, + -0.73046875, + -1.6796875, + 3.53125, + -2.796875, + 0.1669921875, + 0.57421875, + 0.9765625, + -0.302734375, + 0.8125, + -2.046875, + 1.015625, + 0.91015625, + -1.765625, + 4.5625, + -1.4765625, + -2.515625, + -0.1171875, + -0.953125, + 2.421875, + 2.34375, + -1.421875, + 1.1328125, + -4.9375, + 0.1728515625, + 1.109375, + 2.078125, + 2.40625, + 1.875, + -4.34375, + -2.0625, + -1.3046875, + 0.2578125, + 1.078125, + 0.4375, + -1.171875, + -1.578125, + -0.54296875, + 0.1640625, + 0.0576171875, + 0.1103515625, + 1.265625, + 1.46875, + -0.33203125, + 1.7890625, + 2.078125, + -0.8125, + 0.7890625, + -3.421875, + -1.8984375, + 2.078125, + 0.6640625, + 2.28125, + 0.90234375, + -0.474609375, + 3.453125, + 0.69140625, + -0.36328125, + 5.5, + 3.453125, + -0.091796875, + -0.796875, + -1.578125, + 0.3984375, + 0.73828125, + 2.25, + 0.01092529296875, + -3.375, + 0.8828125, + 1.6875, + 2.1875, + 2.296875, + 2.34375, + -2.125, + 1.2890625, + 5.0, + -2.953125, + -2.359375, + -3.921875, + -1.203125, + 0.6640625, + -0.859375, + -1.296875, + 0.8515625, + -2.515625, + 1.1640625, + 1.5234375, + -0.0791015625, + 1.109375, + 0.46875, 
+ 0.8828125, + 1.3984375, + 1.109375, + -0.34765625, + -0.1494140625, + 0.9921875, + 3.734375, + -1.3046875, + -2.75, + 0.74609375, + -1.296875, + 0.0220947265625, + 1.734375, + 4.125, + 1.59375, + -2.640625, + -0.22265625, + 13.625, + -2.1875, + 0.1416015625, + -0.26953125, + 3.125, + 1.9375, + 1.0546875, + -1.734375, + -0.984375, + -0.7578125, + 1.953125, + 2.21875, + -8.5625, + -2.875, + -3.46875, + 0.671875, + 0.3984375, + 2.0, + -0.014404296875, + 0.1552734375, + -2.03125, + -1.3046875, + 0.2236328125, + 1.2734375, + -1.0078125, + -1.0234375, + 2.171875, + 1.109375, + -0.640625, + -3.109375, + -0.283203125, + 2.625, + -1.1484375, + 1.46875, + -1.328125, + -3.375, + -0.6640625, + 0.7578125, + -0.298828125, + -1.46875, + -1.2734375, + -2.171875, + -0.05224609375, + 2.703125, + -15.75, + -0.400390625, + 1.6484375, + 0.56640625, + 1.828125, + 1.390625, + -0.91796875, + 0.1923828125, + -0.72265625, + 1.6171875, + 0.8359375, + -4.125, + -2.09375, + -0.494140625, + 1.640625, + -1.5078125, + 3.34375, + 3.4375, + 0.3671875, + 4.75, + -1.546875, + -0.50390625, + 3.34375, + -3.109375, + 2.4375, + -1.765625, + 0.859375, + 0.0673828125, + -3.0, + -1.8984375, + -0.2578125, + 2.78125, + 1.3203125, + -1.140625, + 1.1875, + -0.1044921875, + 0.1435546875, + -0.85546875, + -1.8515625, + 0.439453125, + -0.46875, + -0.57421875, + 0.330078125, + -0.2099609375, + 1.2578125, + -0.333984375, + -5.1875, + -0.29296875, + -0.455078125, + 1.9609375, + -1.7734375, + 3.625, + 2.46875, + -6.8125, + -1.5, + -2.5, + -0.431640625, + 0.0028533935546875, + -1.609375, + -1.8203125, + 2.8125, + 1.0, + -3.34375, + 0.369140625, + -3.390625, + -1.65625, + 0.70703125, + -0.33203125, + 1.6953125, + 2.96875, + 0.8515625, + 3.875, + -0.578125, + -6.25, + -0.0008544921875, + 0.271484375, + -0.76953125, + -2.953125, + 1.8984375, + -2.484375, + 1.9921875, + 1.875, + -6.8125, + 2.6875, + -0.26953125, + -0.0206298828125, + 1.3828125, + 1.4921875, + 0.62109375, + 1.5625, + -0.3515625, + -0.6484375, + 
-1.7421875, + 3.109375, + 0.72265625, + 2.1875, + 1.2734375, + 0.70703125, + 0.359375, + -0.765625, + -0.08447265625, + -1.8359375, + -4.21875, + 2.71875, + 2.84375, + -0.60546875, + -2.515625, + -1.578125, + -1.875, + 1.609375, + 0.37890625, + -0.609375, + -1.4375, + 0.2431640625, + 5.59375, + 2.59375, + -1.25, + -1.6640625, + 0.42578125, + 1.5390625, + -1.3359375, + 3.90625, + -1.8125, + -0.255859375, + 1.21875, + 2.015625, + 0.1494140625, + 0.96484375, + -2.0625, + 1.1328125, + -2.859375, + 0.482421875, + -2.25, + -0.2119140625, + -1.109375, + -0.134765625, + 1.53125, + -1.53125, + -1.453125, + 0.423828125, + -2.140625, + -0.447265625, + 3.46875, + -2.453125, + -1.1328125, + 0.30078125, + -1.5, + -0.1298828125, + -4.78125, + 0.068359375, + -1.4375, + 2.4375, + 1.890625, + -2.28125, + -1.6640625, + 3.328125, + -4.125, + 3.421875, + -0.0029296875, + -3.25, + -0.1298828125, + -0.80078125, + 3.0, + -1.8046875, + -1.28125, + 0.474609375, + -0.455078125, + 3.65625, + 1.625, + -0.58984375, + 3.515625, + 3.734375, + -0.78515625, + -1.5546875, + -1.1640625, + -3.203125, + 0.1318359375, + 2.15625, + -3.078125, + 0.89453125, + -0.07275390625, + -2.375, + 0.48046875, + 3.125, + 3.046875, + 0.201171875, + 0.2421875, + -2.234375, + 3.46875, + 0.6171875, + -2.390625, + -1.546875, + 2.0, + 0.1708984375, + -0.3828125, + -2.328125, + -1.5390625, + -1.578125, + 1.0546875, + 0.58984375, + 1.921875, + -1.859375, + 0.41796875, + -1.8359375, + 1.6640625, + 4.09375, + 3.40625, + 3.484375, + -1.8203125, + 3.15625, + 1.40625, + 0.2421875, + 3.78125, + 2.765625, + 1.3046875, + -1.875, + -2.765625, + -1.7578125, + -1.8046875, + 1.03125, + -1.0390625, + 1.09375, + 0.82421875, + 0.27734375, + -0.62109375, + 3.421875, + 0.640625, + -0.1650390625, + -0.283203125, + 1.7265625, + -1.03125, + 1.4609375, + 0.423828125, + -0.6328125, + 3.140625, + -0.028564453125, + 1.53125, + 3.234375, + -0.59375, + -2.640625, + 0.6171875, + 3.4375, + 2.78125, + 0.7265625, + -0.462890625, + -0.94140625, + 
-1.0546875, + -2.78125, + 1.328125, + -0.78515625, + 0.427734375, + 3.375, + -0.42578125, + 1.6328125, + 3.109375, + -0.609375, + -3.0, + 0.7421875, + -1.921875, + 0.640625, + -0.016845703125, + -1.2578125, + 2.765625, + -0.53125, + 1.9921875, + 0.953125, + 0.30859375, + -2.4375, + -1.4921875, + 1.5, + 2.171875, + -0.47265625, + 1.171875, + -1.625, + -3.34375, + 0.6796875, + 0.4296875, + -2.953125, + 2.65625, + 0.185546875, + -0.6875, + -0.1748046875, + 1.5, + -0.051025390625, + 2.484375, + -0.376953125, + -1.2734375, + -0.419921875, + -0.357421875, + 1.3203125, + 1.765625, + 1.8125, + -1.3203125, + 0.984375, + -2.421875, + 1.1796875, + -0.2890625, + 0.6875, + 0.55859375, + -4.65625, + 3.828125, + 3.046875, + -2.234375, + -1.7421875, + 1.015625, + -0.072265625, + -0.0888671875, + -2.09375, + 0.38671875, + 0.671875, + 0.384765625, + -2.421875, + -4.34375, + 0.7265625, + 0.328125, + -1.4296875, + -1.5859375, + 1.7109375, + -1.75, + 0.6875, + 2.234375, + -1.328125, + -1.015625, + -6.15625, + 0.126953125, + -0.56640625, + 2.671875, + 1.2421875, + -2.1875, + 1.3203125, + 0.306640625, + -1.375, + -0.0625, + -1.359375, + 0.4140625, + -1.078125, + 2.0, + -0.92578125, + 2.765625, + -0.056396484375, + -2.234375, + -0.333984375, + 0.62109375, + 0.2236328125, + 0.44921875, + 0.90234375, + -0.703125, + 0.5703125, + -1.09375, + -1.4609375, + -5.1875, + -2.03125, + -0.578125, + -1.8125, + -0.5, + -0.92578125, + 0.83203125, + 5.375, + 0.8046875, + -1.6875, + 3.421875, + -0.173828125, + -0.359375, + -0.32421875, + 3.765625, + 0.90625, + 0.55859375, + 1.140625, + 1.5, + -0.61328125, + 2.21875, + 3.265625, + 0.86328125, + -3.71875, + 6.5625, + -2.1875, + 0.92578125, + -5.40625, + -2.25, + -2.109375, + 2.109375, + 4.59375, + 2.953125, + -1.390625, + 0.91015625, + 0.51171875, + -0.37109375, + -2.078125, + -1.34375, + -0.7890625, + -0.578125, + -0.046142578125, + 2.984375, + -1.6875, + 2.015625, + -2.3125, + 3.375, + 0.5546875, + -1.671875, + 1.375, + 0.4765625, + 2.046875, + 
-1.2890625, + 2.96875, + -0.3203125, + 0.5390625, + -0.69921875, + -2.125, + -0.03173828125, + -0.30859375, + 1.234375, + -5.4375, + 1.6953125, + 0.443359375, + 0.466796875, + 2.609375, + -0.8984375, + 0.859375, + -1.7109375, + -1.0234375, + -1.90625, + 2.0, + 0.0791015625, + -0.70703125, + 0.640625, + -1.484375, + 0.8515625, + -0.345703125, + 1.984375, + 0.94921875, + -2.21875, + 0.71484375, + -0.546875, + 2.3125, + 1.5234375, + 1.5078125, + -0.1357421875, + 1.734375, + -0.82421875, + -2.84375, + 3.90625, + -2.0, + 1.8359375, + -1.2265625, + -1.234375, + -3.28125, + -1.328125, + -0.26171875, + 2.65625, + 1.1796875, + 1.8203125, + 3.015625, + 1.671875, + 1.625, + -0.130859375, + -0.2421875, + 0.16796875, + -3.3125, + 0.65625, + 0.29296875, + 0.6640625, + -2.125, + -1.359375, + 1.734375, + -1.390625, + -0.09619140625, + -0.5859375, + 1.1796875, + 1.25, + -4.09375, + -0.9609375, + 1.140625, + -0.263671875, + -2.296875, + 0.1337890625, + -3.15625, + 2.84375, + 3.171875, + -1.421875, + -2.546875, + 2.09375, + 0.443359375, + 1.9765625, + -1.6875, + 4.8125, + -3.21875, + -1.75, + -0.8359375, + -5.4375, + 2.578125, + 1.7578125, + -1.1171875, + -2.046875, + 0.75, + 0.5703125, + 4.96875, + -2.25, + -0.2470703125, + -1.0625, + 1.421875, + 0.30078125, + 0.7421875, + -0.86328125, + -2.46875, + 0.244140625, + 1.8984375, + -0.79296875, + -3.0, + 1.671875, + -1.875, + -2.171875, + 1.4375, + 0.435546875, + -0.6953125, + -0.80859375, + 0.10546875, + -0.02734375, + 2.15625, + 3.0, + -1.3046875, + 2.5625, + 1.0859375, + 2.6875, + 1.25, + 3.28125, + 0.76171875, + 0.46875, + 0.0810546875, + 0.47265625, + -0.99609375, + -2.03125, + 0.875, + -3.0, + 0.3828125, + 1.1171875, + 1.03125, + -2.078125, + 1.9296875, + 0.875, + -2.328125, + 2.359375, + 1.2890625, + 1.6796875, + 3.5, + -0.68359375, + -0.193359375, + 1.9296875, + 0.9375, + 0.65234375, + -3.609375, + -2.65625, + -0.64453125, + -1.2265625, + 0.404296875, + -0.640625, + -0.7265625, + -3.4375, + -2.640625, + 0.64453125, + -1.4375, + 
-1.2578125, + -1.28125, + 0.396484375, + 2.6875, + -0.66796875, + -3.5, + -0.8984375, + 1.7109375, + -0.6640625, + -0.84765625, + -0.42578125, + -0.33984375, + -1.6953125, + -1.875, + -0.126953125, + -0.0947265625, + -2.453125, + 0.123046875, + 1.2265625, + 0.5703125, + -0.35546875, + -0.58203125, + 0.01123046875, + -1.703125, + -2.015625, + -1.8671875, + -0.9609375, + -1.015625, + -2.671875, + -0.546875, + -0.9609375, + -0.404296875, + 2.796875, + -0.6796875, + -0.71875, + 0.07421875, + 1.5703125, + 1.015625, + 1.9609375, + -0.53515625, + -1.8515625, + 1.8984375, + -3.828125, + 2.078125, + 1.3828125, + -1.78125, + 2.109375, + -0.0673828125, + 6.09375, + 0.83984375, + 3.046875, + -0.169921875, + 1.546875, + -0.3828125, + -1.34375, + -0.5703125, + 0.1826171875, + -1.9453125, + 0.296875, + 1.7421875, + -0.5, + 0.083984375, + 1.4140625, + 1.6796875, + 2.375, + 2.46875, + 0.0, + -3.3125, + -2.890625, + -2.03125, + 0.39453125, + -2.21875, + -2.25, + 2.921875, + 1.2734375, + -0.58203125, + -2.5625, + 0.984375, + -1.203125, + 6.625, + -2.6875, + 1.4140625, + 1.140625, + -1.6015625, + 1.0703125, + -1.0546875, + -0.443359375, + -0.0228271484375, + -0.039794921875, + -2.4375, + 0.65625, + 0.734375, + 0.62890625, + 0.88671875, + 1.1484375, + -0.51953125, + 0.7109375, + 1.28125, + 1.484375, + -1.1953125, + 2.3125, + 2.4375, + 0.7421875, + 0.6796875, + -2.546875, + 1.7578125, + -1.3515625, + 1.9765625, + -2.515625, + 4.75, + -1.078125, + 1.046875, + 1.75, + 0.33203125, + 2.859375, + 2.6875, + -2.21875, + 0.08740234375, + -1.0390625, + 0.7890625, + -0.625, + 1.34375, + -1.6171875, + -2.078125, + -0.8828125, + -0.138671875, + 1.1015625, + -0.55078125, + 2.390625, + 0.474609375, + -0.4140625, + 1.7578125, + -1.390625, + -1.0234375, + -1.2265625, + 0.310546875, + 3.734375, + -1.0546875, + 3.03125, + 3.625, + -0.7890625, + -1.1484375, + -0.73046875, + 0.392578125, + 0.80078125, + 2.375, + -1.09375, + -2.28125, + -1.1328125, + 0.81640625, + -4.5625, + 0.60546875, + -0.85546875, + 
0.88671875, + 2.984375, + -1.625, + 2.1875, + 5.5, + 0.90234375, + 0.34765625, + -0.3984375, + -1.6796875, + 1.453125, + 0.404296875, + 1.2578125, + 1.53125, + -0.8046875, + 1.8203125, + -1.890625, + 1.359375, + 0.1708984375, + -0.173828125, + -1.546875, + -3.046875, + 0.039306640625, + 0.423828125, + -2.8125, + 0.890625, + 0.01171875, + -0.271484375, + -0.97265625, + 3.265625, + -2.546875, + -4.34375, + 2.421875, + 1.3828125, + 1.515625, + 2.765625, + -2.09375, + 2.984375, + -1.2734375, + -1.9921875, + -0.03466796875, + 2.140625, + 0.68359375, + -0.97265625, + -0.6328125, + -1.953125, + -2.796875, + -0.64453125, + -1.34375, + 0.02880859375, + -5.78125, + -0.96484375, + 1.546875, + 0.2177734375, + -1.984375, + 2.140625, + 0.6015625, + 0.6796875, + 1.984375, + 0.71875, + 0.75390625, + 0.9609375, + 3.78125, + 0.28125, + -0.19921875, + -2.03125, + 1.078125, + 1.6015625, + 1.6953125, + 0.0634765625, + -0.0703125, + 0.435546875, + 0.068359375, + 4.125, + -2.21875, + -0.494140625, + -2.40625, + 1.625, + 0.734375, + -2.765625, + 2.140625, + -1.5390625, + 1.9296875, + 0.984375, + 0.875, + -0.98828125, + -0.83984375, + -3.0, + 1.53125, + -0.8984375, + -0.007293701171875, + 0.462890625, + -1.0625, + 0.345703125, + 2.703125, + 3.4375, + -3.453125, + -2.953125, + 0.63671875, + -0.578125, + -2.796875, + 0.94921875, + -3.4375, + 1.3359375, + -4.09375, + 0.48046875, + -3.0, + 0.421875, + 0.25, + -0.74609375, + -1.0859375, + 0.171875, + -2.234375, + -2.703125, + 1.4140625, + -0.76171875, + 9.25, + 0.474609375, + -2.40625, + 1.4765625, + 3.40625, + -0.6953125, + 4.625, + -2.265625, + 0.64453125, + 0.8046875, + 0.6015625, + 1.8203125, + 0.59375, + 2.21875, + 0.0028076171875, + -0.283203125, + -1.0703125, + 1.5390625, + 1.234375, + -1.2265625, + 0.53125, + -0.1064453125, + -2.0, + -2.953125, + 1.3828125, + 3.796875, + 1.3203125, + 1.1171875, + -1.84375, + -0.98828125, + -2.84375, + -2.921875, + 3.859375, + -2.359375, + -0.279296875, + -0.5546875, + 0.5078125, + 2.625, + 0.95703125, + 
-3.796875, + 1.7265625, + -1.8359375, + 4.625, + -2.1875, + -0.84765625, + -2.21875, + 1.1953125, + 4.125, + 3.53125, + -0.60546875, + 0.63671875, + 0.052490234375, + 1.4375, + 0.95703125, + -3.40625, + -1.53125, + -1.21875, + -3.78125, + -0.57421875, + 1.3125, + -0.034912109375, + 0.365234375, + -0.79296875, + -0.8125, + -1.7109375, + -0.50390625, + 1.09375, + -0.26171875, + -1.046875, + -2.890625, + 0.8515625, + 0.0654296875, + 1.453125, + -2.140625, + 3.546875, + 0.92578125, + 4.875, + -2.390625, + 0.53125, + 0.38671875, + 3.671875, + -0.73046875, + 1.609375, + -2.046875, + -2.046875, + -0.026123046875, + -3.71875, + -0.3671875, + -0.09326171875, + 3.1875, + -0.087890625, + -0.90625, + -0.240234375, + 1.4296875, + -0.65625, + 1.4609375, + 2.28125, + 0.04541015625, + -1.109375, + 0.388671875, + 0.85546875, + 1.0859375, + 3.203125, + -1.1640625, + 1.3125, + 0.98828125, + 0.5625, + -1.03125, + -1.578125, + -0.7265625, + -1.09375, + -2.65625, + 0.80859375, + -1.21875, + 3.125, + 0.030517578125, + -1.1015625, + 0.396484375, + -2.171875, + 1.2421875, + 0.64453125, + 0.21875, + -0.53125, + -1.7578125, + -0.54296875, + -0.67578125, + -2.515625, + 0.484375, + -1.171875, + 0.4765625, + 1.7265625, + -6.96875, + 0.0196533203125, + -0.51171875, + -1.5234375, + -0.0556640625, + 1.4140625, + 1.109375, + 0.5, + -2.875, + -0.40625, + 2.4375, + -3.75, + 1.4140625, + 2.921875, + -0.875, + 0.52734375, + 3.734375, + -0.31640625, + -0.26171875, + 1.4453125, + 4.8125, + 1.6171875, + 3.5, + -0.0439453125, + 1.6796875, + -3.59375, + 1.84375, + -0.36328125, + 0.169921875, + 0.447265625, + -2.125, + 0.47265625, + -1.078125, + -3.421875, + -2.21875, + 1.59375, + -1.3359375, + 2.484375, + 0.93359375, + 0.5234375, + -2.796875, + 1.046875, + -2.609375, + -1.71875, + -0.0299072265625, + -2.453125, + -1.3515625, + 0.21875, + -1.765625, + -0.33984375, + 1.5, + -1.6875, + -0.53515625, + -0.8828125, + 0.41796875, + 2.859375, + 1.6171875, + 3.484375, + 0.265625, + -0.74609375, + -4.28125, + 
1.734375, + -0.287109375, + -5.40625, + -1.3984375, + 2.65625, + 1.3984375, + -0.365234375, + 0.5234375, + -0.0274658203125, + -0.1025390625, + 2.59375, + -0.2333984375, + -1.1328125, + -1.640625, + 1.4140625, + 1.328125, + 0.0, + -3.078125, + -0.63671875, + -2.875, + 2.3125, + 1.5625, + -3.25, + 2.046875, + -0.0791015625, + 2.828125, + -5.15625, + -5.4375, + -2.359375, + -1.78125, + 1.25, + -0.86328125, + 1.125, + 3.0625, + -0.43359375, + -0.10009765625, + 1.8125, + 0.271484375, + -0.875, + 1.015625, + -1.6171875, + 0.2373046875, + 1.1796875, + -4.65625, + 1.359375, + -1.1171875, + 0.52734375, + -0.9296875, + -2.71875, + 2.78125, + -1.6015625, + -1.4609375, + 0.98828125, + -2.25, + -3.59375, + 0.251953125, + -3.296875, + -1.8359375, + 2.515625, + 0.10693359375, + 3.8125, + 3.0625, + -3.75, + 0.92578125, + -1.484375, + 2.1875, + 2.09375, + -4.4375, + 2.34375, + -1.7890625, + -2.140625, + 1.4609375, + -1.3125, + 0.2275390625, + -3.109375, + -1.15625, + 3.203125, + 1.3046875, + -0.453125, + -1.3671875, + -2.75, + -4.4375, + 0.0169677734375, + -1.234375, + -2.15625, + 1.96875, + 1.8671875, + 0.9921875, + 1.8984375, + 0.984375, + -2.265625, + 0.07958984375, + 1.5625, + 2.40625, + -1.3125, + -0.83984375, + 0.9375, + 2.859375, + 0.609375, + -0.2060546875, + -1.640625, + 0.24609375, + 5.1875, + 1.0546875, + -2.25, + -0.1943359375, + -2.6875, + -0.1416015625, + 2.234375, + -1.1875, + 0.90234375, + -2.0, + -2.125, + 3.25, + 0.130859375, + -0.89453125, + -2.421875, + -0.6875, + -6.0625, + 0.333984375, + 0.1787109375, + -2.109375, + 2.28125, + 2.375, + 1.0859375, + -0.7109375, + -2.0625, + -1.7265625, + 0.0250244140625, + -1.8203125, + 1.765625, + 1.5390625, + 2.6875, + 3.796875, + 1.9921875, + 1.6640625, + -1.3203125, + 0.5078125, + -1.4140625, + 1.0078125, + 2.75, + 0.6953125, + 1.2265625, + -0.6171875, + -1.7890625, + 2.5, + -1.359375, + -2.015625, + -0.36328125, + 0.0361328125, + 2.4375, + 1.375, + -1.8671875, + -8.4375, + 1.5859375, + 0.52734375, + -0.7109375, + 
0.447265625, + -2.34375, + -2.078125, + -0.4453125, + -1.5625, + 0.6875, + -2.65625, + -0.36328125, + 0.017333984375, + -0.59765625, + -0.2412109375, + 0.0, + 0.91796875, + 3.296875, + 1.6171875, + -0.7890625, + -0.76171875, + -1.3046875, + 1.7578125, + -0.5703125, + 0.75, + -2.03125, + -2.078125, + -0.3359375, + 2.15625, + 0.27734375, + -2.25, + -0.25, + -0.8671875, + 3.53125, + 1.265625, + -0.86328125, + 0.69921875, + 4.0625, + -0.400390625, + 1.0859375, + -1.8203125, + 3.703125, + 2.5625, + -0.8671875, + 0.65234375, + -0.67578125, + -0.240234375, + 2.578125, + 2.125, + 1.25, + 1.15625, + -0.45703125, + 0.333984375, + -0.416015625, + 1.5, + 3.640625, + 0.13671875, + 1.7421875, + -0.7734375, + 1.484375, + -1.6171875, + 0.6328125, + -3.390625, + -2.078125, + 2.171875, + 1.96875, + -1.2421875, + 1.53125, + 4.625, + 1.421875, + -1.609375, + 0.1845703125, + -0.84375, + -2.109375, + 0.90234375, + 3.78125, + -1.1171875, + 0.875, + -0.057861328125, + -1.171875, + 1.0078125, + 1.71875, + 4.75, + -0.98046875, + -0.828125, + -1.1640625, + 1.734375, + -0.09130859375, + 2.734375, + 0.033935546875, + 0.90625, + 0.4296875, + 0.62890625, + 0.70703125, + 2.125, + 7.375, + 1.5078125, + -6.5, + -0.828125, + 1.3515625, + 0.34375, + -0.796875, + 2.078125, + 0.0615234375, + 0.859375, + 1.53125, + -0.01007080078125, + 3.546875, + -0.431640625, + 2.921875, + -3.59375, + 2.390625, + -0.953125, + 2.125, + 0.224609375, + -1.25, + -0.1533203125, + 1.53125, + 2.265625, + 0.51171875, + 1.8125, + -3.28125, + -1.484375, + -4.34375, + 3.03125, + -0.076171875, + -0.87890625, + -0.06982421875, + -0.28125, + -0.2060546875, + -0.447265625, + -1.0078125, + 1.59375, + 0.373046875, + 0.859375, + 0.1513671875, + 3.21875, + -0.60546875, + -1.7109375, + -1.6015625, + 1.921875, + 0.51171875, + -7.34375, + 2.75, + -0.8125, + 1.4765625, + -0.515625, + 1.90625, + -0.486328125, + -0.0172119140625, + -1.453125, + 2.515625, + 0.50390625, + -1.9140625, + -1.0859375, + 0.4375, + -1.6484375, + 2.578125, + 
1.4921875, + 0.11572265625, + 1.375, + 0.5546875, + -2.65625, + 0.01171875, + -0.62109375, + -0.244140625, + -3.546875, + 4.78125, + 2.375, + 1.453125, + -3.640625, + 1.4140625, + 3.328125, + 1.0703125, + -3.1875, + 3.0625, + -2.90625, + 3.59375, + -0.2333984375, + -2.515625, + -1.3828125, + 1.28125, + 0.75, + 1.0078125, + 0.640625, + 1.1875, + 0.6796875, + 0.39453125, + 2.34375, + -0.55078125, + -4.03125, + -2.890625, + -2.21875, + 0.66015625, + 0.90625, + 1.8828125, + 2.65625, + 1.2890625, + 0.043212890625, + -0.51953125, + -1.5078125, + -1.8671875, + -0.01422119140625, + -2.015625, + -0.6484375, + -0.66015625, + 0.359375, + 5.09375, + 2.609375, + -0.1796875, + -0.474609375, + 0.6875, + 3.46875, + -4.71875, + 1.5390625, + 0.314453125, + 0.0986328125, + 0.6328125, + 1.7265625, + -2.90625, + -3.84375, + 0.53125, + 1.171875, + 0.625, + 1.546875, + -2.265625, + -5.125, + 0.107421875, + 0.349609375, + 1.65625, + 3.375, + 1.53125, + 0.5859375, + 1.3671875, + 1.9765625, + -4.46875, + 1.71875, + 2.15625, + 2.796875, + -0.045654296875, + 0.93359375, + 0.60546875, + -0.15234375, + -4.09375, + -1.625, + 0.67578125, + -1.1796875, + 3.375, + -0.70703125, + -0.33984375, + 0.78125, + -1.421875, + 3.71875, + 2.90625, + -0.66796875, + -3.515625, + -1.4375, + -1.171875, + -1.6484375, + 2.390625, + 0.9296875, + -0.5703125, + -4.59375, + 0.177734375, + 1.9296875, + -4.6875, + -1.5546875, + -4.375, + -3.15625, + -0.130859375, + 0.345703125, + -4.375, + -0.98828125, + -1.4296875, + -0.62109375, + 2.015625, + 3.390625, + -2.0625, + 0.11962890625, + -0.361328125, + -1.6640625, + 2.0, + -0.73828125, + 0.2255859375, + 2.171875, + -3.203125, + 1.6484375, + 2.453125, + -0.1826171875, + -1.671875, + 5.1875, + -3.59375, + -1.609375, + 0.12890625, + 1.0, + -0.97265625, + -2.125, + -2.671875, + -4.5625, + 0.0830078125, + -1.5, + 2.09375, + -1.1875, + 0.357421875, + -0.67578125, + 1.0390625, + 0.439453125, + 2.15625, + -5.5, + 0.9140625, + -0.181640625, + -2.703125, + 3.046875, + 1.4375, + 
-0.30078125, + -3.765625, + -4.5, + -0.703125, + -1.078125, + -1.3515625, + -0.57421875, + -1.0859375, + -0.578125, + -4.75, + 1.3125, + 1.09375, + -1.3203125, + -1.109375, + 1.046875, + -1.1796875, + 0.6640625, + 1.59375, + 2.28125, + -0.1875, + 0.78125, + -0.400390625, + -2.125, + -1.640625, + -0.1171875, + 2.078125, + -0.5546875, + -0.251953125, + 2.84375, + -2.75, + -3.890625, + -2.875, + -4.34375, + -3.109375, + -0.400390625, + 3.03125, + -4.78125, + 1.5859375, + -0.8046875, + 1.9921875, + -1.3203125, + -0.5234375, + -2.96875, + 1.2734375, + -1.875, + -0.146484375, + -0.8125, + -0.6328125, + -1.53125, + -3.25, + 2.609375, + 2.859375, + -0.3515625, + -4.03125, + 0.478515625, + 1.140625, + 1.9140625, + -2.625, + -2.828125, + 2.34375, + -1.0234375, + 2.46875, + -4.5, + -0.423828125, + 1.6015625, + 0.0, + -1.921875, + 2.203125, + 1.6796875, + -0.4609375, + -0.21875, + 1.5390625, + 1.9609375, + -3.34375, + -1.1796875, + 2.296875, + 1.6171875, + 1.2109375, + -1.578125, + 1.71875, + -1.109375, + 0.07275390625, + -1.359375, + -0.412109375, + -1.234375, + -0.03515625, + -1.1640625, + 2.640625, + -2.90625, + -0.2734375, + 1.2734375, + -1.9375, + -0.27734375, + -1.5859375, + 2.125, + 0.75, + 0.248046875, + -0.640625, + 0.5546875, + -0.8359375, + 0.32421875, + -0.1875, + 0.20703125, + -3.203125, + 1.140625, + -1.3125, + 3.265625, + -2.71875, + -0.66015625, + 0.87109375, + -0.78125, + -1.015625, + -0.244140625, + -0.248046875, + 2.234375, + -0.87109375, + 3.15625, + -2.125, + -4.34375, + -1.2109375, + -2.515625, + 0.58984375, + -0.921875, + -2.53125, + 0.88671875, + 1.9765625, + -0.6015625, + -1.2890625, + 2.21875, + 2.75, + -2.046875, + 2.25, + 1.625, + -1.2265625, + -0.341796875, + -1.8359375, + -2.140625, + 1.4765625, + -0.123046875, + 0.62109375, + -0.1416015625, + 1.5, + -0.18359375, + -1.0234375, + -2.078125, + 4.78125, + -1.8984375, + -0.1591796875, + -0.064453125, + 2.015625, + -3.15625, + -0.01116943359375, + -0.53515625, + 1.8046875, + -0.345703125, + 0.86328125, 
+ -0.2236328125, + -2.328125, + 4.5625, + -1.2265625, + 0.11376953125, + -1.2578125, + 1.1328125, + 0.5703125, + 2.21875, + 4.15625, + -3.203125, + -0.111328125, + -1.6796875, + -4.65625, + -0.1240234375, + -0.306640625, + -1.5234375, + -0.322265625, + -1.4921875, + 1.34375, + -0.80078125, + -0.30859375, + 1.390625, + -1.7109375, + -2.9375, + 3.640625, + 0.83984375, + -1.078125, + 0.45703125, + -0.2158203125, + 0.92578125, + 1.7734375, + -0.9453125, + -13.5625, + -1.4140625, + 4.1875, + -1.390625, + 0.193359375, + -1.7265625, + -0.6640625, + 0.93359375, + 0.92578125, + -0.57421875, + -6.03125, + -1.1953125, + 0.283203125, + 0.404296875, + 0.33984375, + -0.298828125, + 1.3828125, + 0.00921630859375, + 1.4453125, + -0.6875, + 0.6484375, + -0.408203125, + 2.015625, + 0.58984375, + -1.96875, + 2.453125, + -0.29296875, + -1.6953125, + -1.4140625, + -2.34375, + -2.09375, + 1.640625, + 2.203125, + -0.7265625, + 1.2578125, + -3.484375, + -1.171875, + 2.1875, + 0.89453125, + 2.6875, + 1.171875, + 2.546875, + 0.9375, + 5.125, + -0.330078125, + 2.21875, + 0.2216796875, + -0.796875, + 2.46875, + -0.2109375, + 2.5, + 1.96875, + 1.265625, + -0.060791015625, + -1.546875, + 0.3359375, + -2.46875, + -0.451171875, + -0.29296875, + 2.515625, + 0.404296875, + -2.875, + -0.0986328125, + -0.369140625, + 5.71875, + 1.515625, + -1.578125, + -1.4140625, + -2.296875, + -0.08203125, + 0.5859375, + -0.26171875, + 0.5703125, + -0.7890625, + 2.1875, + 0.97265625, + 1.4609375, + -1.5546875, + 1.0, + 0.173828125, + 1.3359375, + 0.49609375, + 1.6171875, + 4.875, + 0.2412109375, + 3.59375, + -0.240234375, + 1.2265625, + -0.703125, + -0.5, + -0.5546875, + 3.921875, + -0.9609375, + -1.7890625, + -2.375, + -2.421875, + -2.5625, + 0.140625, + -2.921875, + -1.0, + -1.4140625, + 0.00579833984375, + -0.1767578125, + 1.5703125, + -1.1875, + -2.15625, + -3.4375, + -0.296875, + -2.046875, + 3.0625, + -1.3203125, + -0.06640625, + 0.9140625, + 2.0625, + -1.7421875, + -1.96875, + 3.953125, + 2.375, + 
0.1708984375, + 2.8125, + -2.25, + 4.3125, + 0.6953125, + 0.8515625, + 0.9765625, + 2.265625, + 3.8125, + -1.875, + 1.453125, + -0.287109375, + 0.1923828125, + -1.921875, + -3.84375, + -1.265625, + -1.3359375, + 0.49609375, + -0.263671875, + -0.8203125, + 3.890625, + -0.0068359375, + -1.109375, + -1.78125, + 0.279296875, + -1.1171875, + 1.84375, + 0.52734375, + -0.6171875, + -2.515625, + -1.1796875, + -0.03515625, + 1.7578125, + -1.421875, + -0.06494140625, + -1.0703125, + -0.765625, + 1.4921875, + 0.796875, + 0.2578125, + 1.6640625, + 8.875, + -0.51171875, + -2.515625, + -0.9453125, + -0.546875, + -1.4296875, + -2.03125, + 1.0390625, + -0.341796875, + 1.84375, + -0.177734375, + 0.439453125, + 1.7109375, + -6.3125, + -2.59375, + 0.49609375, + 2.375, + 2.765625, + 0.80078125, + -1.6015625, + -1.3984375, + 1.5078125, + 1.7578125, + -0.24609375, + -0.203125, + -1.3046875, + -2.359375, + 0.318359375, + 0.734375, + 1.640625, + -1.390625, + 1.8359375, + 1.65625, + 4.125, + 0.6015625, + 1.546875, + -0.515625, + -1.6953125, + 0.9609375, + 1.578125, + 1.1796875, + 0.359375, + 0.41015625, + -1.078125, + -1.6484375, + 1.25, + -0.21484375, + 0.55078125, + -1.3984375, + -5.59375, + 2.09375, + 0.423828125, + 1.0, + 2.5625, + -1.7109375, + 0.466796875, + -0.21875, + -1.296875, + -3.421875 + ], + "index": 0, + "object": "embedding", + "raw_output": null + }, + { + "embedding": [ + 7.03125, + 1.5625, + -2.046875, + -0.39453125, + 7.40625, + 0.8046875, + -1.2265625, + -2.265625, + 4.78125, + -2.765625, + -5.65625, + 4.53125, + 1.3203125, + -2.234375, + 7.03125, + 1.078125, + -0.337890625, + -6.4375, + -0.7734375, + 0.90234375, + -4.15625, + 7.3125, + 2.28125, + 2.78125, + -2.90625, + 0.326171875, + 2.96875, + -0.0142822265625, + -1.0390625, + 2.015625, + -1.015625, + 2.296875, + 3.6875, + 1.84375, + 0.66796875, + 1.8828125, + 1.5625, + 0.546875, + -0.15234375, + -0.50390625, + 5.03125, + -2.28125, + -0.51953125, + 2.9375, + 1.2421875, + -0.419921875, + -1.609375, + -0.0291748046875, 
+ 1.5859375, + 0.984375, + 0.64453125, + -1.4375, + 0.62109375, + -2.71875, + 0.53125, + 1.1015625, + -0.478515625, + -0.27734375, + -2.265625, + 1.078125, + -0.3515625, + -1.078125, + 0.8828125, + -2.5, + 0.71484375, + 3.0625, + 0.35546875, + -0.44921875, + -0.6875, + -2.78125, + 3.171875, + -2.515625, + -1.484375, + 0.48828125, + -1.2578125, + -4.28125, + -2.0625, + 0.0172119140625, + 3.4375, + 1.4609375, + 1.1015625, + -1.4296875, + -1.9609375, + 0.00482177734375, + 1.328125, + -4.53125, + 0.392578125, + -0.88671875, + -1.6640625, + 0.703125, + -1.296875, + 2.609375, + -0.13671875, + 2.21875, + -1.40625, + -9.25, + 0.5703125, + 1.25, + -0.359375, + -0.11376953125, + 3.4375, + 1.90625, + -2.09375, + 0.30078125, + 0.06884765625, + 0.486328125, + 0.70703125, + 0.349609375, + 1.3359375, + -0.322265625, + 0.5234375, + 4.59375, + 2.0, + 1.59375, + 1.7109375, + 0.080078125, + 0.72265625, + -1.390625, + 1.0078125, + 3.34375, + 5.09375, + -3.0625, + -1.078125, + 1.0703125, + -0.84765625, + -3.703125, + 0.9296875, + -1.421875, + 0.1767578125, + -0.75, + 0.5703125, + 2.0625, + 3.25, + -1.1171875, + -1.828125, + -3.9375, + 1.8125, + 0.91796875, + -3.921875, + -0.69140625, + -0.6484375, + -1.5234375, + 0.953125, + 0.97265625, + -2.015625, + -0.051025390625, + 1.640625, + 0.80078125, + -0.37890625, + 0.546875, + 0.88671875, + 0.76171875, + 1.3515625, + -0.5625, + -1.0078125, + 1.0078125, + -0.5703125, + 0.84765625, + -2.21875, + 2.296875, + -3.5, + 0.37890625, + -0.0093994140625, + 0.271484375, + 2.515625, + -0.83984375, + 1.4609375, + 0.416015625, + 3.328125, + -2.046875, + -1.5, + 0.392578125, + -1.84375, + 6.0625, + 0.55078125, + -0.71875, + 1.1328125, + -1.40625, + 0.53125, + -1.6796875, + 1.1015625, + 2.359375, + -3.09375, + -1.734375, + -0.055419921875, + 1.453125, + 0.4140625, + 0.291015625, + -2.875, + -1.328125, + -0.0118408203125, + 0.71484375, + -1.796875, + 0.11181640625, + -0.330078125, + -0.265625, + 0.1455078125, + -1.0078125, + 1.328125, + 4.125, + 3.6875, + 
-6.09375, + 0.462890625, + 3.46875, + -0.345703125, + 0.74609375, + 0.1015625, + 2.234375, + -1.0546875, + -2.484375, + 2.828125, + -1.0078125, + -0.51953125, + -0.953125, + 0.52734375, + -2.25, + 3.578125, + 3.765625, + 0.828125, + 1.6875, + -1.03125, + 0.98828125, + -2.921875, + 1.40625, + -0.78515625, + -1.0078125, + -0.005828857421875, + -2.75, + -0.06591796875, + -0.353515625, + 4.75, + -1.375, + -1.3515625, + -0.55859375, + 6.59375, + -1.421875, + 0.9453125, + -2.28125, + -0.32421875, + -0.318359375, + 1.34375, + -3.171875, + -0.80078125, + 2.859375, + 0.5390625, + -3.75, + 1.46875, + -1.578125, + -0.478515625, + 0.59765625, + -1.4609375, + -4.40625, + 2.6875, + -1.4609375, + -2.828125, + 1.53125, + -3.046875, + 1.2265625, + -1.140625, + 2.015625, + 1.796875, + 1.3828125, + -0.470703125, + -2.40625, + 3.671875, + -2.828125, + -0.8203125, + -3.109375, + -0.431640625, + 0.953125, + 1.1171875, + -3.453125, + -2.75, + 2.328125, + -2.046875, + 1.6015625, + 3.484375, + -2.015625, + 0.98828125, + -1.703125, + -2.734375, + -0.0810546875, + 0.3125, + -1.96875, + 2.703125, + 1.40625, + -1.578125, + 0.4921875, + -0.546875, + -0.8203125, + 0.8828125, + -1.484375, + -0.259765625, + -2.140625, + 1.1875, + 0.236328125, + -1.234375, + -1.7265625, + 2.046875, + 3.90625, + -1.734375, + 0.6015625, + 0.8671875, + -2.921875, + -0.7109375, + 4.125, + 1.9296875, + -1.0, + 2.390625, + -2.515625, + 1.21875, + 0.279296875, + -2.640625, + 0.435546875, + 0.7421875, + -2.921875, + 1.6171875, + -1.3125, + -0.75, + 0.9296875, + 0.53515625, + 0.4140625, + 0.1982421875, + -0.68359375, + -0.7109375, + -1.484375, + 0.0966796875, + -0.8984375, + 2.078125, + -1.609375, + -3.171875, + -4.71875, + -0.12255859375, + 2.703125, + -0.1708984375, + 1.4921875, + 1.34375, + -1.5625, + 4.78125, + 0.7734375, + 1.3046875, + 1.6875, + 1.2109375, + 0.859375, + 0.2255859375, + -4.59375, + -0.4453125, + 3.046875, + -4.375, + 0.5625, + -2.296875, + -1.0703125, + -0.7734375, + -2.984375, + -0.65625, + -3.015625, 
+ -0.79296875, + -0.2001953125, + -0.2119140625, + -0.98046875, + 0.10693359375, + 1.40625, + -1.7421875, + -1.5625, + -1.46875, + 2.53125, + 0.79296875, + 0.29296875, + -2.09375, + -0.75390625, + -0.2265625, + 0.55078125, + 4.90625, + -1.53125, + 1.375, + 0.98828125, + -2.1875, + -2.15625, + 2.328125, + -0.296875, + 2.609375, + -7.46875, + -0.076171875, + 0.546875, + -0.1123046875, + -0.07666015625, + 0.369140625, + -3.46875, + -1.3984375, + -1.9921875, + -1.2421875, + -3.625, + 1.1484375, + 3.078125, + 0.1611328125, + 0.041015625, + -0.259765625, + -1.5078125, + 0.984375, + -0.8828125, + -1.1015625, + -1.265625, + -0.08935546875, + -0.2060546875, + -1.2421875, + 1.6953125, + 2.765625, + 2.109375, + -0.048095703125, + 2.4375, + -0.515625, + 0.310546875, + 2.4375, + -3.96875, + 2.75, + -1.0703125, + 3.875, + 2.953125, + -1.5078125, + 1.4453125, + -0.73828125, + -1.71875, + -0.03759765625, + 1.4765625, + -3.71875, + 2.484375, + 0.58984375, + 0.1962890625, + -2.46875, + -1.1484375, + 0.94140625, + 0.28125, + -4.28125, + 2.046875, + 0.86328125, + 6.21875, + 0.890625, + 0.5859375, + -0.9609375, + 0.88671875, + -0.333984375, + -0.1103515625, + -1.890625, + -6.21875, + -1.9296875, + -2.953125, + 0.166015625, + -2.375, + -0.78125, + 0.53515625, + 0.34765625, + 0.259765625, + -0.462890625, + 0.390625, + -2.375, + -0.76171875, + -1.84375, + 3.171875, + -0.578125, + 1.703125, + 2.390625, + -1.921875, + 0.796875, + 1.28125, + 2.1875, + 2.53125, + 2.4375, + 1.3359375, + -2.15625, + 0.09033203125, + 1.7265625, + -0.72265625, + -4.0625, + -0.98046875, + -0.8203125, + 2.5, + -0.470703125, + -1.53125, + 2.421875, + -5.0625, + -0.0810546875, + -1.203125, + -2.734375, + 0.8359375, + 1.234375, + 0.515625, + -1.21875, + -3.8125, + 1.9375, + -0.5, + 0.205078125, + 0.244140625, + 2.03125, + -0.52734375, + 0.734375, + -1.6640625, + 0.765625, + 0.6328125, + 2.234375, + -1.3046875, + -0.53515625, + 0.88671875, + -1.546875, + 0.169921875, + 1.625, + 0.400390625, + 1.21875, + 0.53125, + 
-0.72265625, + -1.5625, + 5.4375, + -4.3125, + -3.359375, + -2.359375, + 1.484375, + 0.5859375, + 0.45703125, + -0.09228515625, + 1.1953125, + 0.50390625, + 0.042236328125, + 0.59765625, + 1.671875, + 0.92578125, + -2.234375, + 2.6875, + -0.140625, + -1.46875, + 1.46875, + 2.8125, + -0.359375, + -0.2236328125, + 1.21875, + -1.8203125, + 0.50390625, + 0.77734375, + -0.828125, + -0.4765625, + 3.03125, + 0.01129150390625, + -0.462890625, + 1.84375, + -2.359375, + -0.0673828125, + -2.46875, + -1.3515625, + -1.4921875, + -0.0272216796875, + -1.828125, + -2.640625, + 1.9765625, + 3.28125, + 4.8125, + 3.1875, + -1.203125, + -1.8125, + 1.7421875, + -0.8046875, + -1.8671875, + -0.392578125, + -0.78515625, + 1.453125, + 0.0556640625, + -0.5546875, + -0.408203125, + 1.359375, + -0.828125, + 0.0213623046875, + -0.0115966796875, + -1.390625, + 1.375, + 1.25, + -1.3203125, + -1.2265625, + -0.58984375, + -1.3671875, + -3.0625, + -1.78125, + -3.34375, + -2.375, + 0.5234375, + 0.73046875, + -1.671875, + -0.578125, + -0.74609375, + 1.3515625, + 1.4375, + -1.0234375, + 1.125, + -4.25, + 1.296875, + 1.546875, + 0.45703125, + 0.59375, + -0.09423828125, + -1.5, + 1.296875, + 2.890625, + -1.203125, + 2.34375, + -0.62890625, + -4.21875, + 0.5234375, + -2.84375, + -3.0625, + 0.52734375, + -2.21875, + 2.078125, + 2.921875, + 2.0625, + 1.1640625, + 3.640625, + -4.5, + -2.578125, + -0.6796875, + 1.234375, + 2.09375, + -3.28125, + 3.15625, + -1.984375, + 1.328125, + -3.40625, + -0.265625, + 2.40625, + 2.515625, + -0.953125, + -0.419921875, + 3.78125, + -1.2734375, + -1.65625, + -1.40625, + 2.59375, + 0.392578125, + 2.625, + -2.1875, + -0.058349609375, + -1.4609375, + -1.390625, + 0.302734375, + 2.03125, + -0.6015625, + 4.65625, + -2.90625, + -0.8125, + -1.3828125, + 0.349609375, + 1.9140625, + 2.296875, + -1.0703125, + 1.140625, + 0.02294921875, + -1.515625, + -1.234375, + 0.34375, + -0.1845703125, + 1.15625, + 0.11181640625, + -0.10888671875, + -3.03125, + 0.12353515625, + -2.96875, + 
0.2490234375, + 2.15625, + -0.59375, + 3.046875, + 3.765625, + -0.1376953125, + -0.1259765625, + -0.3671875, + -0.6484375, + -2.796875, + -2.453125, + -0.330078125, + -3.6875, + 3.390625, + -0.408203125, + 1.9921875, + 2.5, + 0.1904296875, + -1.1015625, + 4.53125, + 0.1884765625, + 0.01373291015625, + 0.75, + 0.43359375, + -4.0, + 1.3984375, + -2.34375, + 0.98046875, + -2.296875, + -1.515625, + -2.609375, + -2.328125, + -0.23046875, + -0.6640625, + 0.671875, + -2.296875, + -4.40625, + 0.86328125, + 2.21875, + 4.5625, + 0.69140625, + 0.6171875, + -1.421875, + -2.6875, + -2.484375, + 1.4453125, + -1.6015625, + 3.265625, + -1.59375, + -3.71875, + -1.578125, + -0.69140625, + -1.890625, + 1.234375, + -0.10986328125, + 0.279296875, + -0.30078125, + -4.625, + -0.609375, + 2.625, + 2.53125, + -1.3046875, + -0.46484375, + -0.1572265625, + -0.8046875, + 0.06787109375, + 1.453125, + 3.75, + 2.296875, + -3.078125, + -1.46875, + 1.8203125, + 0.216796875, + -2.640625, + 0.302734375, + -5.8125, + -0.6328125, + -0.58984375, + -1.546875, + -0.875, + -4.375, + -2.03125, + 0.90625, + -0.96875, + -4.9375, + -0.412109375, + -1.0703125, + 0.123046875, + -0.92578125, + 3.75, + 0.09375, + -0.8203125, + -1.234375, + 1.765625, + 1.90625, + -0.1142578125, + -0.43359375, + 0.1181640625, + 0.546875, + 1.4765625, + 1.890625, + -0.2197265625, + -2.9375, + 2.671875, + 1.1875, + 0.390625, + 1.171875, + 0.75390625, + -3.625, + 0.025390625, + -3.515625, + 0.5546875, + -0.5, + 1.8203125, + 1.515625, + 2.4375, + 1.953125, + 1.0390625, + -1.328125, + -1.875, + -1.5390625, + -0.10546875, + -0.84765625, + 0.515625, + 6.1875, + -1.8046875, + 0.435546875, + 3.8125, + 2.984375, + 1.8359375, + -2.46875, + 2.453125, + 3.171875, + 2.5625, + -1.15625, + 1.2265625, + 0.404296875, + -2.328125, + 5.21875, + 1.9453125, + 0.88671875, + -0.69140625, + -0.8515625, + 2.578125, + -0.7890625, + 0.302734375, + -0.54296875, + 1.4921875, + 3.078125, + -2.78125, + 0.10205078125, + -0.1435546875, + 1.734375, + -0.369140625, + 
-1.6171875, + -1.9609375, + -1.53125, + -0.2060546875, + 0.67578125, + -1.03125, + -4.5625, + 2.25, + 3.28125, + -2.75, + -0.8671875, + -2.46875, + 0.9296875, + -0.04736328125, + -2.84375, + 1.484375, + 0.40625, + -3.734375, + 2.0, + -1.046875, + -0.08056640625, + 5.1875, + 1.2421875, + -2.578125, + 1.6328125, + -1.65625, + 2.640625, + 0.09912109375, + 0.0458984375, + -2.84375, + -0.5859375, + -0.11474609375, + -0.5234375, + -1.484375, + 0.330078125, + 0.734375, + -3.578125, + -1.1796875, + 4.3125, + 0.92578125, + -1.1640625, + 0.416015625, + -0.040283203125, + 1.390625, + -2.734375, + -1.671875, + 0.61328125, + 2.203125, + 0.6171875, + -0.2890625, + -0.06689453125, + -0.32421875, + 2.984375, + 0.89453125, + -1.6015625, + 2.296875, + -0.310546875, + -0.1572265625, + -2.84375, + 3.15625, + 0.64453125, + 1.1484375, + -0.19921875, + -0.734375, + -1.3359375, + 3.109375, + -0.38671875, + 1.3984375, + 1.9140625, + 1.171875, + 1.21875, + 0.5234375, + 3.25, + -0.08642578125, + -0.734375, + -0.287109375, + -2.34375, + -0.51171875, + 0.23828125, + -0.54296875, + -2.0, + 0.90234375, + 1.5, + 2.0, + -0.2294921875, + 0.6484375, + -3.6875, + -3.796875, + -0.39453125, + 1.1640625, + 2.8125, + -0.9140625, + 0.9609375, + 1.1015625, + 3.484375, + -1.0234375, + 0.875, + 3.5, + 0.0206298828125, + -1.53125, + 3.71875, + 0.55859375, + -0.236328125, + -2.71875, + 3.109375, + -1.9296875, + -0.361328125, + -1.421875, + 2.28125, + 1.765625, + 3.609375, + 0.07421875, + -0.98046875, + 1.453125, + -0.78515625, + -2.859375, + -1.5078125, + 0.039794921875, + 0.376953125, + -0.87109375, + 2.640625, + 0.2578125, + -0.828125, + 3.078125, + -0.71484375, + 2.703125, + -1.8125, + 1.4140625, + -0.71484375, + -1.6015625, + -2.453125, + -0.3671875, + 0.63671875, + -2.890625, + 1.328125, + -1.390625, + -3.328125, + 5.125, + -0.419921875, + 0.2080078125, + -0.3671875, + 0.33984375, + 0.87109375, + -3.140625, + -1.265625, + -1.078125, + -2.125, + 2.96875, + 1.8984375, + -1.6171875, + -0.66796875, + 
2.015625, + 0.1826171875, + -5.5, + 1.8984375, + -0.4921875, + -1.4296875, + -3.21875, + -0.326171875, + 2.21875, + 1.9453125, + 4.8125, + -2.828125, + 0.87890625, + -1.0546875, + -0.875, + 0.640625, + -0.8046875, + -0.54296875, + 1.703125, + 2.03125, + 0.80859375, + -0.63671875, + -4.125, + 12.125, + 0.058837890625, + 1.5390625, + 4.78125, + -0.48046875, + 2.0625, + -2.953125, + 0.330078125, + -0.263671875, + -0.10400390625, + -1.203125, + 0.018310546875, + 0.197265625, + 0.29296875, + -4.46875, + 3.03125, + 1.2890625, + -0.796875, + 3.78125, + 2.53125, + 0.51953125, + -2.109375, + -0.671875, + 0.55078125, + 0.5703125, + -3.328125, + 0.78125, + -1.984375, + 0.11572265625, + -3.625, + -1.875, + 1.8515625, + -1.8671875, + -2.53125, + 3.40625, + 1.6640625, + -0.7578125, + -0.53125, + -4.09375, + 2.265625, + -0.6484375, + 2.40625, + -3.15625, + -0.78515625, + 1.078125, + 0.6484375, + 0.3125, + 0.3671875, + -0.51953125, + -3.109375, + -2.0, + -0.3515625, + -2.359375, + 1.109375, + 0.8828125, + -1.484375, + -3.0, + -0.000499725341796875, + 3.15625, + -1.4453125, + 0.50390625, + -0.55859375, + -2.875, + -1.0546875, + 1.984375, + 0.1201171875, + 0.9921875, + -3.21875, + 0.333984375, + -2.84375, + 0.2294921875, + 0.953125, + -0.1884765625, + 2.03125, + 0.451171875, + -0.92578125, + 1.7734375, + -0.03857421875, + -0.36328125, + 0.55859375, + -0.6953125, + 0.6875, + 0.11669921875, + -1.6796875, + -0.7109375, + 2.796875, + 3.71875, + -1.7734375, + -1.7890625, + -0.81640625, + -0.6953125, + 0.09765625, + -0.67578125, + 3.796875, + -1.09375, + 1.578125, + 3.1875, + 2.171875, + 0.58203125, + -0.80078125, + -0.9765625, + -1.59375, + 0.95703125, + -0.294921875, + 0.353515625, + -2.15625, + -1.3515625, + -4.21875, + -0.97265625, + 2.546875, + 4.53125, + 2.03125, + -1.671875, + 0.302734375, + -2.421875, + 0.4765625, + 1.0234375, + 2.046875, + 0.6875, + -0.53515625, + -0.65625, + -5.03125, + 1.0625, + 2.140625, + -4.65625, + -9.75, + -1.890625, + 1.2578125, + 1.078125, + 
-0.1123046875, + -1.3515625, + -4.0625, + -2.015625, + -0.251953125, + -2.53125, + -0.89453125, + 2.0625, + 0.69921875, + -0.0029296875, + 0.72265625, + -7.5, + -0.00860595703125, + 0.0185546875, + 0.25, + 0.0986328125, + -1.8671875, + -0.1484375, + 1.3125, + -0.40234375, + 3.015625, + -2.421875, + -2.578125, + 1.875, + 0.41796875, + -1.90625, + -0.953125, + -0.72265625, + -1.671875, + -1.0703125, + -0.4140625, + -0.9921875, + -1.375, + 1.6015625, + 0.59765625, + 2.296875, + 2.03125, + 1.140625, + 2.109375, + -1.9921875, + -1.9765625, + 0.83984375, + -2.96875, + 1.78125, + 2.375, + 0.60546875, + -0.05126953125, + 1.5859375, + -2.234375, + 0.07470703125, + -0.447265625, + -0.3515625, + 1.875, + -0.177734375, + 0.71484375, + 2.53125, + 1.8828125, + 3.015625, + 2.328125, + -0.38671875, + -1.8515625, + -0.8359375, + -2.75, + -3.171875, + 0.03173828125, + -0.90625, + 2.421875, + -1.3359375, + -3.109375, + -0.0869140625, + 2.859375, + -1.6640625, + -0.34375, + 0.039306640625, + 0.29296875, + 1.78125, + 1.4453125, + -0.345703125, + -2.25, + -2.640625, + 2.921875, + 1.4375, + -0.2734375, + 2.25, + -1.1953125, + 1.5, + -0.9453125, + 3.734375, + 5.59375, + -1.5859375, + -2.234375, + -3.921875, + -2.625, + 2.640625, + 0.1064453125, + -2.9375, + -0.94921875, + 1.109375, + -1.0390625, + 0.66796875, + -0.76953125, + 0.09765625, + -0.388671875, + 1.265625, + 5.375, + 0.58984375, + 3.265625, + -0.515625, + 2.203125, + 0.48828125, + 2.234375, + 2.859375, + -1.421875, + -0.474609375, + 3.34375, + -0.4765625, + 1.5234375, + -2.703125, + -0.5703125, + 0.30078125, + 1.765625, + -4.28125, + -2.125, + -2.8125, + 1.8046875, + 2.765625, + -0.4609375, + -2.625, + 1.8515625, + 1.8828125, + 1.5390625, + 0.83984375, + -0.82421875, + 1.8984375, + -1.1171875, + -0.0771484375, + -4.40625, + 1.0, + -1.8046875, + -2.578125, + -1.5859375, + -1.7421875, + 2.5625, + -2.015625, + -1.0859375, + 1.4375, + 0.3203125, + -0.6015625, + -0.41796875, + -0.453125, + 1.828125, + -0.87109375, + 4.40625, + 
0.9296875, + 0.92578125, + 2.90625, + 1.0, + 3.328125, + 2.890625, + 1.6171875, + -1.3125, + -1.5859375, + 0.474609375, + 0.6640625, + 1.0625, + 2.890625, + 1.875, + -1.8828125, + 1.6875, + -0.80859375, + 1.1640625, + -0.6171875, + -1.125, + -1.4296875, + 1.03125, + 3.25, + 2.734375, + 2.5625, + -1.140625, + 1.4453125, + 1.6875, + -1.546875, + 2.75, + 1.578125, + -4.875, + 1.6875, + 0.375, + 0.625, + -2.078125, + 0.57421875, + -0.33984375, + -3.90625, + 2.796875, + -3.03125, + 0.60546875, + -0.82421875, + -1.625, + -0.31640625, + -1.4296875, + -1.0078125, + -3.921875, + -1.453125, + -1.34375, + 3.65625, + 1.2421875, + -3.546875, + 1.6875, + 1.3984375, + -1.3125, + 2.265625, + 0.734375, + -0.9140625, + -2.0, + -2.015625, + -1.5234375, + 2.09375, + -3.078125, + -0.404296875, + -0.0810546875, + -0.69921875, + -0.34375, + 0.87890625, + -2.78125, + -0.57421875, + -1.1171875, + 0.98828125, + -2.71875, + 0.96484375, + 1.390625, + -0.07275390625, + 4.0625, + 3.21875, + -1.7421875, + 1.734375, + -2.421875, + -1.9140625, + 2.53125, + 2.859375, + 1.0625, + -2.90625, + -1.9453125, + -0.322265625, + -2.59375, + -0.0296630859375, + 0.22265625, + -1.6328125, + 0.6015625, + -0.73046875, + 2.125, + 0.427734375, + -0.2470703125, + -1.46875, + -0.091796875, + -2.21875, + 1.5078125, + 0.71484375, + -0.71484375, + 0.703125, + 2.6875, + 2.359375, + -3.34375, + 1.0625, + 0.328125, + 2.328125, + 1.6171875, + -0.2431640625, + -1.171875, + -0.734375, + 1.578125, + 2.875, + 3.953125, + 0.6328125, + -1.7109375, + -1.1640625, + -0.5859375, + 0.30078125, + 1.9296875, + -1.890625, + 0.7890625, + -4.9375, + 0.099609375, + 0.1669921875, + 1.2890625, + -0.546875, + -1.15625, + 3.96875, + -2.84375, + -0.75, + 0.95703125, + -1.0703125, + -3.09375, + -0.75390625, + -1.703125, + 0.5625, + 1.171875, + -1.90625, + -0.9609375, + -0.1337890625, + 0.6015625, + -3.90625, + 1.96875, + -0.20703125, + -1.6484375, + -1.7578125, + 2.125, + -0.828125, + 1.5703125, + 1.0078125, + -1.265625, + -1.9296875, + -1.625, 
+ 2.3125, + -0.306640625, + -0.83984375, + -1.5859375, + 2.375, + -2.296875, + 1.0703125, + 2.6875, + 1.09375, + 0.3359375, + -1.59375, + 0.65234375, + -0.365234375, + 2.203125, + -6.0, + -1.6484375, + -1.140625, + 0.1923828125, + 1.0078125, + 2.96875, + -3.75, + -3.25, + -1.46875, + -0.279296875, + -3.21875, + 4.5625, + 1.2421875, + 2.09375, + 0.515625, + 4.71875, + 2.71875, + -3.734375, + 2.59375, + 1.140625, + -1.8203125, + 0.322265625, + 0.9921875, + -1.6015625, + 0.72265625, + -0.045166015625, + -3.15625, + 4.25, + 0.96484375, + 1.0625, + -1.703125, + 3.078125, + 0.9609375, + -1.6015625, + 0.275390625, + -1.078125, + 0.130859375, + 0.58984375, + -2.734375, + -0.25390625, + 0.734375, + 1.15625, + -1.6953125, + 1.5546875, + -1.6015625, + -0.83203125, + 2.046875, + -2.234375, + -2.859375, + -0.7578125, + 0.2470703125, + 3.0625, + 2.046875, + 2.078125, + 0.478515625, + -2.046875, + -2.125, + -2.828125, + 1.25, + 0.8515625, + 0.08154296875, + -4.78125, + 0.78515625, + -0.51953125, + 0.0218505859375, + -0.349609375, + -0.1748046875, + -1.65625, + -1.8671875, + 1.2734375, + 4.46875, + 1.03125, + -1.8984375, + -0.76953125, + 2.0, + 0.16015625, + -0.078125, + 0.94921875, + -0.068359375, + 0.91796875, + -1.28125, + 3.484375, + -1.7265625, + -0.470703125, + -0.91796875, + 0.48828125, + -0.390625, + 1.4140625, + -3.125, + -1.9296875, + 2.765625, + -3.375, + -0.279296875, + 3.078125, + 1.3046875, + -1.0703125, + 0.12890625, + 1.7421875, + -1.40625, + -0.275390625, + 0.412109375, + -0.04052734375, + -2.359375, + 1.9921875, + -3.3125, + -0.022705078125, + -2.046875, + -0.7421875, + 2.765625, + 2.28125, + 1.453125, + 2.0625, + -0.84765625, + 2.84375, + -5.75, + 0.376953125, + -1.0234375, + 1.71875, + -1.9453125, + 0.12890625, + 0.244140625, + 1.734375, + 1.453125, + -2.15625, + 2.171875, + 1.03125, + -1.6875, + -1.2734375, + 0.388671875, + 2.046875, + 0.384765625, + 0.8984375, + 3.390625, + -1.5390625, + -0.88671875, + 0.09326171875, + 3.1875, + -0.7421875, + -0.83984375, + 
2.40625, + 0.625, + -0.1240234375, + 1.875, + -1.515625, + -0.022705078125, + -0.01519775390625, + -0.62109375, + 0.546875, + 1.328125, + -3.3125, + -0.43359375, + 1.0625, + 2.1875, + -2.078125, + -1.46875, + 4.09375, + 0.23046875, + 4.3125, + 0.26171875, + -1.3125, + 0.875, + 0.29296875, + 0.0673828125, + -1.6328125, + 2.53125, + 2.625, + -3.828125, + -1.2578125, + -1.34375, + 2.078125, + -0.796875, + 1.328125, + 1.9921875, + 2.140625, + 0.45703125, + 1.3203125, + -0.482421875, + 2.0, + -0.80078125, + -0.98046875, + -1.6328125, + 0.240234375, + 0.478515625, + 0.18359375, + -0.1689453125, + 0.91015625, + 0.63671875, + -0.45703125, + -0.52734375, + -2.671875, + -2.640625, + 1.4296875, + -5.6875, + -3.171875, + 1.3671875, + 3.765625, + 1.203125, + 1.8828125, + 1.0625, + 0.5078125, + 1.375, + 2.9375, + 1.3515625, + 1.6875, + 1.7734375, + 0.9921875, + -1.5390625, + 1.125, + 2.15625, + -0.1640625, + -2.15625, + -0.65234375, + -0.703125, + -1.3125, + 0.0194091796875, + -0.7421875, + -1.7734375, + 0.1630859375, + -2.34375, + 0.318359375, + -0.95703125, + 0.90234375, + -1.5078125, + 0.71484375, + -2.109375, + -1.375, + 1.625, + -0.796875, + 2.09375, + -1.5078125, + 2.546875, + 2.0, + 1.3828125, + -0.8984375, + -0.5859375, + 0.486328125, + 3.0, + -1.4453125, + 1.4375, + -3.359375, + 0.515625, + -2.625, + 4.59375, + -1.1015625, + 1.734375, + -1.359375, + -0.431640625, + 3.046875, + 2.484375, + -3.71875, + 1.0078125, + -2.53125, + 0.41015625, + 0.71484375, + -0.50390625, + -1.671875, + -2.53125, + -2.1875, + 2.078125, + 2.09375, + -2.265625, + 0.048828125, + -2.78125, + -0.65625, + 0.8125, + 1.28125, + -0.82421875, + 2.921875, + 0.9375, + 0.041259765625, + -4.40625, + 0.50390625, + 0.0947265625, + 3.875, + -0.86328125, + -0.66796875, + -1.734375, + -2.3125, + -2.5, + 0.82421875, + 1.328125, + -0.5390625, + 0.95703125, + -0.07275390625, + -1.3125, + 0.201171875, + 1.3515625, + -2.59375, + -4.15625, + -2.375, + 0.111328125, + 2.171875, + -3.59375, + -3.53125, + -2.078125, + 
0.56640625, + 1.25, + -0.546875, + 1.671875, + 1.9296875, + -1.1875, + 2.515625, + -0.1318359375, + -0.328125, + 0.95703125, + -3.203125, + -1.421875, + -1.453125, + -0.2734375, + -0.0869140625, + -3.171875, + -1.5078125, + -0.5390625, + -2.296875, + 0.70703125, + -0.16796875, + 1.7265625, + 2.140625, + -0.447265625, + -1.875, + 1.3046875, + 1.203125, + 3.515625, + 1.515625, + 3.953125, + 1.2578125, + 0.328125, + 1.765625, + 1.6640625, + 1.265625, + -2.25, + 1.5390625, + -1.15625, + -1.6953125, + -1.015625, + 1.8984375, + -1.765625, + -2.25, + -2.203125, + 0.59765625, + -5.34375, + -1.6171875, + 0.291015625, + -3.15625, + 2.75, + -1.3046875, + 1.671875, + 1.109375, + -3.84375, + -2.21875, + 0.61328125, + -6.1875, + 0.498046875, + 0.478515625, + 1.9765625, + 1.375, + -2.625, + -2.609375, + 0.59765625, + -0.9609375, + -1.3515625, + 1.3046875, + -0.13671875, + 2.578125, + -1.828125, + -1.7421875, + 2.96875, + -2.140625, + 2.703125, + -3.125, + -2.453125, + -6.0, + -0.3203125, + 1.390625, + 0.82421875, + -0.3359375, + 0.006195068359375, + -1.828125, + 1.828125, + 0.08154296875, + 0.5703125, + 0.031982421875, + 5.1875, + -1.90625, + 1.421875, + 1.6328125, + 1.9765625, + 1.484375, + -0.375, + 0.1005859375, + 0.88671875, + 2.734375, + 4.375, + 2.390625, + -2.078125, + -0.0001163482666015625, + 2.109375, + -1.390625, + 0.1259765625, + -0.94140625, + -0.400390625, + 0.7421875, + -0.431640625, + -1.171875, + 1.0625, + 0.392578125, + -0.123046875, + 0.6484375, + 2.25, + 3.265625, + -2.734375, + -0.88671875, + 2.015625, + 1.6484375, + -0.66796875, + -0.028564453125, + 0.7890625, + -2.203125, + -0.9453125, + 1.6484375, + 1.5234375, + 2.265625, + -1.59375, + -0.365234375, + 2.546875, + -0.28125, + -1.8515625, + -1.0078125, + -0.1357421875, + 1.4375, + -0.578125, + 0.3671875, + -0.2890625, + 0.0927734375, + -3.15625, + -0.349609375, + 0.439453125, + -1.4296875, + 1.7734375, + -2.734375, + -3.625, + -0.298828125, + -1.265625, + 0.90625, + -0.54296875, + -3.1875, + 7.625, + 
-5.34375, + 0.33984375, + 2.96875, + -1.375, + 3.4375, + -1.1484375, + -2.84375, + -0.57421875, + 0.703125, + -2.53125, + 2.703125, + -2.109375, + 0.298828125, + -0.267578125, + -0.390625, + 0.61328125, + 2.265625, + 4.15625, + -0.4765625, + -0.189453125, + -1.203125, + -3.421875, + -0.90234375, + 0.4296875, + 1.359375, + -1.9375, + -0.55859375, + 2.296875, + -0.84765625, + 0.74609375, + 2.703125, + -0.2421875, + 1.03125, + -1.625, + -1.71875, + -1.75, + 3.015625, + 3.59375, + -0.828125, + 1.0859375, + -0.62109375, + -0.54296875, + 1.3671875, + 0.6875, + 2.84375, + -1.6484375, + 2.484375, + 0.2392578125, + -2.703125, + 2.1875, + -0.2431640625, + 0.8203125, + 1.890625, + -4.5625, + 0.37109375, + 1.703125, + 1.0390625, + 1.7109375, + 1.453125, + 0.66796875, + -1.5, + 1.7734375, + -0.66015625, + -0.6484375, + -0.3671875, + -0.625, + -0.58203125, + 0.90625, + 1.1328125, + 3.921875, + -0.94140625, + -1.0078125, + 4.34375, + 1.125, + -4.5625, + -3.546875, + 2.046875, + 1.2421875, + -0.0244140625, + 1.2890625, + 1.578125, + 1.734375, + -0.8203125, + -1.609375, + -1.625, + -1.6953125, + 1.2578125, + -0.35546875, + 1.28125, + -0.55078125, + 2.421875, + 0.8671875, + 0.61328125, + -1.3203125, + -0.412109375, + 0.78125, + -2.609375, + 0.373046875, + 1.3203125, + 4.15625, + 2.28125, + -3.25, + 0.74609375, + 3.625, + -0.33203125, + -0.578125, + -2.328125, + 3.28125, + 0.0167236328125, + 1.2109375, + 0.98828125, + -1.0078125, + -0.51953125, + -0.11572265625, + -1.1171875, + -0.3125, + 0.07763671875, + -0.7890625, + -1.0078125, + -0.84765625, + 0.58984375, + 0.40625, + -1.4609375, + -1.4140625, + -1.0703125, + 3.125, + -0.65234375, + -1.9375, + 1.4609375, + -0.51953125, + -0.32421875, + 1.96875, + -0.609375, + 3.171875, + -0.11962890625, + 0.53515625, + -0.01422119140625, + -1.71875, + 0.236328125, + 0.357421875, + -0.1455078125, + -4.3125, + 3.015625, + -0.482421875, + 3.140625, + -0.734375, + -0.90625, + 1.125, + -3.109375, + 2.1875, + 0.83984375, + 2.0, + -0.328125, + 
-0.859375, + 3.71875, + 3.421875, + -0.87109375, + -0.60546875, + 1.2734375, + 3.84375, + 0.640625, + -2.109375, + -0.90625, + 2.6875, + -1.859375, + -2.125, + -0.08642578125, + 1.1015625, + 2.0625, + 0.671875, + -1.46875, + 0.75390625, + 1.3515625, + 2.640625, + -0.7890625, + -0.0634765625, + -1.1640625, + 0.51953125, + -0.4375, + -1.671875, + 2.1875, + 1.5625, + 0.201171875, + 1.921875, + -4.5625, + 0.94921875, + 2.609375, + 0.48828125, + 4.90625, + 3.109375, + -1.4140625, + -2.4375, + -1.3125, + 1.6796875, + -0.6171875, + -0.76953125, + -0.57421875, + -2.53125, + 1.375, + -1.796875, + 0.796875, + -2.875, + 2.109375, + 1.125, + -2.140625, + 0.4765625, + 0.953125, + 0.412109375, + 1.6953125, + -0.9765625, + -0.56640625, + -0.439453125, + -0.0458984375, + -1.578125, + -0.345703125, + -0.291015625, + 2.328125, + -1.1796875, + 0.67578125, + 0.83984375, + -4.09375, + 3.640625, + 0.5703125, + -0.6015625, + -2.359375, + -4.15625, + 4.59375, + 0.765625, + -2.84375, + -2.6875, + -0.53125, + -1.5859375, + 0.75, + -0.26171875, + 4.21875, + -0.1328125, + 2.984375, + 0.0673828125, + 0.90625, + -2.359375, + -0.984375, + 1.3046875, + 2.375, + -1.328125, + -0.796875, + 0.9375, + -0.2412109375, + 0.61328125, + -3.015625, + 1.0390625, + 2.171875, + -2.96875, + -0.6796875, + 1.359375, + -2.25, + -1.296875, + 0.75390625, + 0.119140625, + -2.8125, + 0.1005859375, + 2.015625, + 0.29296875, + -0.1357421875, + 0.27734375, + 4.625, + -1.015625, + 0.431640625, + -0.466796875, + 8.9375, + -4.3125, + -3.640625, + 0.51953125, + -1.3828125, + 0.9921875, + 0.5078125, + -0.56640625, + -1.203125, + -2.578125, + 0.287109375, + -1.5546875, + -6.8125, + 0.053466796875, + 2.046875, + 1.6328125, + -0.53125, + 1.28125, + 0.013671875, + 0.5390625, + 1.1328125, + -3.203125, + 4.4375, + 2.53125, + 1.140625, + 0.765625, + 2.3125, + -0.330078125, + 0.890625, + -2.359375, + -0.3515625, + 1.734375, + 0.1728515625, + 3.375, + -2.078125, + -1.75, + 0.70703125, + -1.046875, + 0.6015625, + -2.90625, + -1.109375, 
+ 0.06884765625, + 0.431640625, + 1.546875, + -2.359375, + -0.66015625, + 0.85546875, + 1.0859375, + -1.1171875, + 0.6640625, + -0.671875, + 0.263671875, + -1.4375, + -1.296875, + -1.8125, + -1.78125, + 0.0703125, + -0.875, + 1.53125, + 0.04345703125, + 6.0625, + -0.408203125, + 1.6328125, + 0.8671875, + 0.022705078125, + -2.28125, + -0.2275390625, + 0.8984375, + 0.78125, + 0.77734375, + 1.0703125, + 2.984375, + -3.21875, + -0.62109375, + 1.7421875, + -0.703125, + 1.2578125, + 1.625, + -4.90625, + 1.390625, + 1.3828125, + -2.34375, + -2.015625, + 0.7421875, + 2.59375, + 0.90234375, + -1.953125, + 0.734375, + 0.40625, + -1.1640625, + -0.21484375, + -1.234375, + 1.546875, + -2.828125, + 0.2451171875, + 0.828125, + 1.5078125, + -3.875, + 1.5078125, + -1.8515625, + -0.8671875, + -0.267578125, + -0.69140625, + -3.0, + -0.16796875, + -1.1796875, + -1.625, + -1.15625, + 0.1865234375, + -1.375, + -0.023681640625, + 2.28125, + 2.0625, + 1.984375, + -4.15625, + 2.640625, + 0.85546875, + -5.84375, + -0.0, + -1.0546875, + 1.65625, + 1.6640625, + 2.09375, + -1.359375, + -1.4765625, + 0.9453125, + -4.90625, + 5.9375, + 0.5703125, + -0.5625, + -0.057861328125, + -0.765625, + -3.0, + -1.4453125, + 0.1513671875, + -0.48828125, + -1.78125, + 1.8828125, + -2.421875, + 3.0, + -1.5859375, + 0.91015625, + -0.059326171875, + -0.51171875, + -1.5703125, + -1.1640625, + -3.734375, + 1.4921875, + 0.443359375, + -1.6328125, + -5.40625, + -0.18359375, + -3.359375, + 1.5546875, + 0.828125, + 1.984375, + -2.140625, + 1.265625, + 2.390625, + -1.421875, + -3.1875, + -0.5390625, + -0.1796875, + 2.953125, + -3.859375, + 3.078125, + 1.5546875, + -1.953125, + -1.0078125, + 1.390625, + 3.078125, + 1.3046875, + -0.625, + -1.34375, + 0.1552734375, + 0.5390625, + -0.177734375, + 2.359375, + 0.953125, + 0.078125, + 0.79296875, + 2.09375, + 0.65625, + 1.2578125, + -0.72265625, + -1.5390625, + -1.734375, + -1.484375, + -0.169921875, + 2.109375, + -1.09375, + -1.1953125, + 2.609375, + 0.640625, + 0.89453125, 
+ 1.5625, + -3.5625, + 1.0546875, + -1.765625, + -2.3125, + -1.109375, + -0.0106201171875, + -0.5703125, + -0.84375, + 0.9609375, + -0.0245361328125, + 0.2080078125, + -3.03125, + -0.392578125, + 2.09375, + 0.06494140625, + 2.46875, + 2.1875, + 0.2373046875, + 2.34375, + 1.5625, + 1.6796875, + 0.1806640625, + 2.265625, + 0.061279296875, + -1.5625, + 3.09375, + -2.53125, + 0.56640625, + 0.341796875, + -4.125, + 2.53125, + -1.21875, + 1.65625, + 1.6328125, + -1.5546875, + -1.4921875, + 0.57421875, + 0.21875, + -0.6640625, + 3.71875, + -0.435546875, + 3.765625, + 0.11279296875, + -3.40625, + -3.296875, + -3.1875, + 0.2734375, + 1.9140625, + 3.125, + -3.734375, + 0.07958984375, + -0.89453125, + 2.125, + 1.2421875, + -0.5859375, + -0.77734375, + -0.58203125, + 0.41015625, + 0.65625, + 0.9921875, + -0.373046875, + 1.390625, + 2.28125, + -3.125, + -2.546875, + -0.10107421875, + -0.58203125, + 0.28515625, + -1.640625, + -1.46875, + -2.890625, + 1.046875, + 1.859375, + 3.03125, + -1.8125, + 0.470703125, + 1.6328125, + 0.8046875, + -0.39453125, + -0.287109375, + 3.0625, + 1.53125, + -2.140625, + 0.5703125, + 2.484375, + 0.625, + 1.3984375, + 0.4765625, + -1.0078125, + 0.455078125, + 2.015625, + 0.279296875, + 3.421875, + -0.7109375, + -2.15625, + -1.2890625, + -1.7890625, + 0.431640625, + 2.40625, + 1.1953125, + -0.345703125, + 1.546875, + 1.1640625, + 0.8828125, + 1.7734375, + 1.3984375, + 0.91796875, + -1.0390625, + -1.0546875, + -0.341796875, + -2.125, + 1.7109375, + -1.015625, + -1.4453125, + -0.330078125, + -1.7109375, + -0.095703125, + 1.0625, + 1.34375, + -1.09375, + -1.03125, + -2.296875, + 0.0, + 0.059814453125, + 1.6328125, + -2.25, + 2.84375, + 1.4921875, + 0.490234375, + -1.7421875, + -0.54296875, + -0.421875, + 0.013916015625, + -1.859375, + -3.59375, + 1.859375, + -3.71875, + 0.59375, + 2.328125, + 0.55078125, + -0.9921875, + 0.93359375, + 0.203125, + -3.21875, + 0.5234375, + 0.765625, + 3.609375, + -0.921875, + 3.34375, + 1.171875, + 0.1220703125, + -5.90625, 
+ -1.2578125, + -0.37890625, + 2.3125, + -1.96875, + 1.265625, + -0.77734375, + 0.00286865234375, + -3.3125, + 0.82421875, + 1.03125, + -0.6328125, + -0.9375, + -1.6171875, + 0.73046875, + 0.0927734375, + -3.65625, + -0.150390625, + -1.859375, + -1.2578125, + -8.125, + -1.09375, + 0.515625, + -0.392578125, + 2.640625, + -0.9375, + 1.546875, + -0.79296875, + 0.61328125, + 0.65234375, + -1.8046875, + 1.703125, + 3.515625, + 1.078125, + -2.546875, + 0.04443359375, + 0.98046875, + 3.078125, + -2.0625, + 0.45703125, + -0.7734375, + 1.125, + -0.359375, + -0.5234375, + -1.484375, + 2.375, + 1.7734375, + -1.875, + -1.5, + 0.01214599609375, + -6.4375, + -1.0078125, + 0.3203125, + 1.109375, + 1.1640625, + 0.578125, + -2.546875, + 3.375, + -0.64453125, + 0.0380859375, + -2.0625, + 0.98046875, + 0.44921875, + -1.109375, + -1.3359375, + 3.234375, + -0.9453125, + 6.15625, + 2.71875, + -0.158203125, + -2.015625, + 2.640625, + -2.265625, + 0.5390625, + -5.0, + -2.359375, + -1.7265625, + 4.34375, + 6.84375, + 4.5, + -0.2392578125, + -0.2060546875, + -1.03125, + -1.828125, + -4.9375, + -2.484375, + -0.7578125, + 1.859375, + 0.91796875, + -5.78125, + -2.828125, + 0.51171875, + -1.65625, + -0.12890625, + 2.484375, + 0.11279296875, + 2.734375, + -2.6875, + 0.796875, + -2.234375, + 0.9921875, + 0.26171875, + 1.7734375, + -0.341796875, + -0.36328125, + -0.57421875, + -0.4609375, + 2.75, + -3.984375, + -0.875, + 1.9375, + -2.265625, + 0.7109375, + -0.96875, + -0.671875, + 1.0546875, + -1.921875, + -2.40625, + 1.2578125, + -0.53125, + -1.2421875, + 1.578125, + -0.4609375, + 1.5390625, + -0.291015625, + 1.796875, + -0.1611328125, + -3.78125, + -1.1640625, + 4.5625, + 0.7109375, + 2.171875, + 8.0625, + -0.7265625, + 0.162109375, + -0.9296875, + 3.84375, + 2.875, + -3.875, + -1.9453125, + -2.203125, + 0.80859375, + 0.1611328125, + 0.302734375, + 1.4296875, + -2.484375, + -1.59375, + -2.875, + 3.53125, + 0.357421875, + 3.640625, + 1.2421875, + 3.21875, + 0.04541015625, + -1.328125, + 
1.5234375, + -1.609375, + -1.1484375, + -1.03125, + 1.3984375, + 0.51171875, + 2.796875, + -0.345703125, + -2.71875, + 1.09375, + -0.1796875, + -1.7578125, + 0.2236328125, + 0.9140625, + -0.66015625, + -2.484375, + 0.0206298828125, + 0.2177734375, + 1.171875, + 8.0, + 2.203125, + -4.21875, + -1.6171875, + -1.78125, + -1.4375, + -4.75, + -0.07958984375, + 0.9765625, + -1.15625, + -2.203125, + -3.15625, + 2.0625, + 2.5625, + 1.7890625, + -0.88671875, + -1.3359375, + -0.55078125, + 0.79296875, + 0.671875, + 1.78125, + 0.08544921875, + -0.671875, + -3.03125, + -2.40625, + 2.4375, + -4.59375, + 1.0859375, + 1.0546875, + -1.03125, + -4.09375, + -0.310546875, + 1.8125, + -1.4921875, + -3.359375, + 2.9375, + 0.859375, + 0.671875, + 2.921875, + 1.140625, + -0.016845703125, + 1.4375, + 0.6796875, + 3.5, + 4.4375, + 1.09375, + 1.5703125, + -1.8125, + 0.2314453125, + -0.4921875, + 0.609375, + 1.7734375, + -0.154296875, + 0.38671875, + 1.671875, + 2.6875, + 0.271484375, + 0.408203125, + 1.4765625, + 0.49609375, + 0.08642578125, + 0.72265625, + -1.390625, + -2.90625, + -3.28125, + -3.75, + 3.40625, + -0.1650390625, + -5.40625, + -0.34375, + -0.248046875, + 1.15625, + -2.78125, + -0.83203125, + 3.265625, + -0.90625, + -0.3359375, + 1.8125, + 0.1923828125, + 2.921875, + -0.72265625, + 1.5546875, + -0.8046875, + 1.8359375, + -3.96875, + 4.4375, + 0.90625, + 3.390625, + -1.2578125, + 0.5390625, + -0.3203125, + -2.03125, + -0.6328125, + 0.98828125, + -0.365234375, + -1.3125, + -0.4765625, + -1.3359375, + -1.6875, + -1.8828125, + -0.2333984375, + -0.2373046875, + -0.37890625, + -0.8515625, + -1.796875, + 0.28515625, + -0.31640625, + -0.74609375, + 0.8046875, + 0.25, + -1.640625, + 0.40234375, + -1.3828125, + -0.58203125, + -1.4140625, + -1.3515625, + 0.80859375, + -0.67578125, + -1.59375, + -3.34375, + -0.6171875, + 4.71875, + -3.15625, + -0.2197265625, + 1.671875, + -0.216796875, + 1.625, + -3.03125, + -5.625, + 0.0228271484375, + 0.205078125, + -1.2109375, + 1.7421875, + 4.3125, + 
0.91015625, + -1.21875, + 1.5078125, + -2.09375, + -1.0078125, + 2.6875, + -3.734375, + 1.1640625, + 2.859375, + 0.7734375, + 0.330078125, + 0.9921875, + -0.1728515625, + -0.16015625, + -2.15625, + 1.3203125, + 0.66015625, + -2.515625, + -0.84375, + -0.2421875, + -1.421875, + 0.31640625, + -0.796875, + 0.7421875, + 1.6953125, + -0.353515625, + 0.8515625, + -2.96875, + 6.3125, + -2.953125, + -2.328125, + 1.0546875, + -0.0673828125, + -0.279296875, + 0.0341796875, + 2.078125, + -1.734375, + 0.7890625, + -2.0625, + -0.82421875, + -1.6015625, + 0.84375, + 0.498046875, + -1.4453125, + 0.5078125, + -0.7421875, + -0.2314453125, + -0.103515625, + 1.6640625, + 2.09375, + 1.0, + 0.6953125, + -4.40625, + -1.2578125, + 0.462890625, + -1.75, + 3.0625, + -2.0625, + 3.390625, + 0.00640869140625, + -0.9765625, + 0.09130859375, + 1.0546875, + 1.9140625, + -0.91015625, + 1.796875, + -2.46875, + -2.640625, + -0.20703125, + -0.75390625, + -0.7421875, + -1.4296875, + -0.54296875, + -0.83984375, + 0.8515625, + -2.125, + 0.60546875, + -0.10107421875, + -0.043701171875, + -0.439453125, + -0.2041015625, + -0.474609375, + 0.87890625, + -3.109375, + -1.484375, + 4.0625, + -0.734375, + -0.75390625, + 1.5546875, + 1.6875, + 0.8359375, + 1.1328125, + 3.25, + 1.21875, + 4.40625, + 0.7265625, + -0.77734375, + 1.6953125, + -0.04296875, + -1.8203125, + -1.34375, + 1.03125, + 1.234375, + 4.0, + 0.5078125, + 0.337890625, + 1.375, + 0.1015625, + -2.296875, + -0.73046875, + 1.7109375, + 2.3125, + -0.47265625, + 0.279296875, + 0.8203125, + -0.2421875, + 1.125, + 0.55078125, + -2.03125, + 0.78515625, + -0.1806640625, + -1.078125, + -3.234375, + -1.96875, + -0.9921875, + 1.7890625, + -0.99609375, + 2.1875, + 1.9609375, + -1.859375, + 0.1943359375, + -1.1796875, + -0.56640625, + 4.0625, + 2.421875, + -1.6640625, + 2.734375, + -1.671875, + 4.0625, + -0.32421875, + -0.60546875, + -0.40234375, + 0.353515625, + -0.205078125, + -1.375, + -1.890625, + -0.92578125, + -2.359375, + -0.796875, + -0.482421875, + 
1.3984375, + -1.125, + -1.7109375, + 2.859375, + -1.2578125, + -5.1875, + -0.4375, + 2.6875, + 1.6015625, + 0.287109375, + 1.7265625, + 6.1875, + 0.60546875, + 6.625, + 1.1171875, + 1.3046875, + 0.2119140625, + 2.796875, + 0.9921875, + 1.421875, + 2.453125, + 1.65625, + -1.5078125, + 0.330078125, + 0.2734375, + 2.578125, + -0.3984375, + 1.0234375, + 1.3828125, + -1.375, + 1.171875, + -4.84375, + -6.625, + -0.6484375, + -1.671875, + 3.875, + -0.041259765625, + 1.9453125, + -5.53125, + -1.421875, + -1.2421875, + 4.90625, + 0.68359375, + -1.4921875, + 0.36328125, + 0.361328125, + -3.609375, + 2.1875, + -0.87890625, + -1.7421875, + -1.03125, + -1.5859375, + 1.1875, + 1.0625, + -0.1787109375, + -2.6875, + 0.4921875, + -1.5625, + 1.0546875, + -1.171875, + -1.5625, + -0.76171875, + 0.98046875, + -1.8046875, + -0.1708984375, + -1.4375, + -1.7109375, + 3.390625, + -1.9296875, + -0.7890625, + 1.3046875, + 1.0625, + -1.640625, + 2.5, + -2.59375, + 0.177734375, + -0.609375, + 1.3125, + 1.5546875, + -0.8046875, + 0.1611328125, + -0.28125, + 0.2373046875, + -2.1875, + 4.4375, + 2.0, + -2.0, + 3.453125, + 2.234375, + 1.03125, + 1.1796875, + 1.7890625, + 2.625, + 0.609375, + 2.15625, + -0.79296875, + -1.7578125, + 0.94140625, + -0.5546875, + 0.203125, + -0.51171875, + -1.703125, + -1.4921875, + 1.8125, + 4.3125, + -3.96875, + -0.671875, + -0.875, + 0.41796875, + 7.15625, + 0.71875, + -1.8671875, + -0.87109375, + 2.15625, + 2.296875, + 3.65625, + 0.036865234375, + 1.1796875, + 1.5703125, + 1.6171875, + 1.1640625, + 0.76953125, + -4.0, + 1.625, + -4.59375, + 1.71875, + 1.578125, + 1.921875, + -0.85546875, + -0.79296875, + -4.125, + 0.50390625, + 1.3515625, + -2.390625, + 0.0517578125, + -3.6875, + -1.140625, + 1.0703125, + -0.96484375, + 1.359375, + 2.609375, + -0.431640625, + 0.384765625, + 3.90625, + 1.4296875, + 0.8046875, + -3.53125, + 3.15625, + -0.1455078125, + -1.265625, + -0.20703125, + 2.359375, + 1.7578125, + 0.51953125, + -2.265625, + -3.375, + 2.640625, + -2.359375, + 
-0.890625, + -3.9375, + -0.5390625, + -4.3125, + 1.9375, + 2.875, + 0.038818359375, + -1.046875, + 0.02978515625, + -0.298828125, + -0.10498046875, + 2.6875, + -0.5078125, + -2.96875, + -0.9609375, + 0.10986328125, + -1.1484375, + -2.828125, + -4.03125, + -0.185546875, + 0.765625, + -2.71875, + 0.7890625, + 1.203125, + -0.421875, + -1.625, + -1.421875, + 0.859375, + 1.59375, + 2.375, + -1.96875, + -1.7265625, + 3.484375, + 2.5, + -1.609375, + -0.92578125, + -2.875, + 0.5703125, + -1.5703125, + 3.875, + -7.3125, + 0.76953125, + 0.6640625, + -0.86328125, + -0.29296875, + -0.09130859375, + -3.3125, + 2.796875, + -2.96875, + 0.66796875, + 1.984375, + -2.28125, + 1.0859375, + 2.046875, + 0.8359375, + -2.265625, + 0.271484375, + 1.4375, + 4.71875, + 0.94140625, + 4.65625, + 2.015625, + 5.4375, + 2.6875, + -0.37109375, + 2.890625, + -1.0234375, + -1.359375, + -2.140625, + 0.77734375, + -1.21875, + 0.875, + -0.859375, + -0.421875, + -2.640625, + -0.39453125, + -0.9765625, + 3.921875, + 2.078125, + -1.1171875, + -1.203125, + -1.3671875, + -3.125, + -0.62109375, + 3.6875, + -0.63671875, + -2.125, + 1.3203125, + 0.03564453125, + -2.15625, + -0.82421875, + 0.875, + 1.875, + -1.65625, + 3.09375, + 1.28125, + -1.109375, + 1.921875, + 3.453125, + -1.046875, + -2.0, + 1.9375, + -0.10986328125, + 0.52734375, + -1.828125, + -1.2421875, + 5.15625, + -1.7265625, + -0.91796875, + -0.1806640625, + 3.15625, + -0.1435546875, + -1.671875, + -3.1875, + -0.96484375, + 0.07275390625, + 0.65234375, + 4.96875, + -2.828125, + 0.88671875, + -2.90625, + -1.4375, + 1.46875, + -4.78125, + -0.89453125, + 0.166015625, + 0.97265625, + 1.9453125, + -4.0625, + 0.8203125, + -1.703125, + -0.4921875, + -2.015625, + -1.703125, + 9.125, + -2.75, + -1.09375, + -1.9609375, + 0.875, + -1.40625, + 1.7890625, + 1.0078125, + -0.5, + -0.1220703125, + -4.0625, + 2.171875, + 0.0849609375, + -0.1513671875, + 2.5625, + -2.25, + -0.0869140625, + 2.84375, + -0.037109375, + 0.92578125, + -0.55859375, + -3.015625, + 
2.109375, + -2.53125, + -0.87890625, + 2.515625, + -0.06591796875, + -2.796875, + 1.9765625, + -1.3671875, + 2.03125, + -0.1962890625, + 0.435546875, + 0.6875, + -3.359375, + -0.9453125, + 0.1318359375, + -1.21875, + -1.28125, + 0.1865234375, + -1.3515625, + -2.15625, + -3.453125, + -0.38671875, + 0.984375, + -2.25, + 0.15625, + -1.0078125, + -1.5546875, + -1.609375, + 0.734375, + -1.15625, + 2.109375, + 1.1875, + 2.796875, + 2.234375, + -0.65625, + -2.328125, + -0.87109375, + 1.4921875, + -0.86328125, + -0.9140625, + -0.028564453125, + -1.8203125, + 0.228515625, + 0.111328125, + -1.390625, + -2.234375, + 0.89453125, + 5.1875, + 4.15625, + -4.375, + -0.2119140625, + -2.484375, + 2.921875, + -2.46875, + -2.96875, + 2.1875, + 1.890625, + 0.6953125, + -0.0159912109375, + -0.57421875, + -1.0546875, + 0.14453125, + -1.59375, + -1.9453125, + 0.94921875, + 1.84375, + 0.07421875, + 1.0078125, + 1.0625, + 0.0927734375, + -1.3046875, + -1.6015625, + -3.921875, + -4.0, + 0.283203125, + 1.578125, + -1.2578125, + 3.71875, + 3.25, + -1.15625, + 2.484375, + 1.46875, + 0.345703125, + 0.287109375, + 2.21875, + 3.75, + -0.5546875, + -0.13671875, + -1.2734375, + -1.8515625, + 1.4609375, + 1.46875, + 0.306640625, + 2.328125, + -4.21875, + 0.267578125, + -1.9296875, + 2.484375, + 1.2421875, + -1.2421875, + 1.578125, + -0.33984375, + 0.373046875, + 1.6015625, + -1.125, + -1.96875, + 0.52734375, + 0.1865234375, + -0.95703125, + -1.171875, + -1.609375, + -1.96875, + -3.875, + -1.1640625, + 0.337890625, + 1.1328125, + -0.38671875, + -0.09814453125, + 2.125, + -0.31640625, + 0.0233154296875, + -3.015625, + 0.07275390625, + -0.466796875, + 0.2890625, + -0.058837890625, + 1.375, + 1.1875, + 0.39453125, + -0.62109375, + -1.65625, + 1.609375, + 0.55859375, + 0.025146484375, + -1.53125, + -0.8203125, + -2.296875, + 0.1953125, + -0.96875, + -0.5390625, + 4.59375, + -2.640625, + 0.578125, + -1.8203125, + 0.05126953125, + -0.0732421875, + 2.1875, + 2.28125, + 3.03125, + -0.85546875, + -6.4375, + 
-0.26171875, + 4.75, + 3.03125, + -1.2734375, + 1.515625, + 0.94140625, + 0.97265625, + 0.0849609375, + 1.5078125, + -4.4375, + 0.07275390625, + 4.90625, + 2.875, + -0.26953125, + -0.48828125, + -0.76953125, + 2.34375, + -2.375, + 0.76171875, + -2.125, + 1.296875, + 0.30859375, + 2.234375, + -0.35546875, + -0.298828125, + 1.34375, + 2.53125, + -2.015625, + 1.515625, + -0.142578125, + -2.890625, + -1.3203125, + -0.06640625, + -2.0, + -1.7890625, + 3.703125, + -1.3125, + -1.8984375, + -1.09375, + -0.287109375, + -1.03125, + 2.734375, + 14.4375, + 0.66015625, + -3.5625, + -1.4140625, + 1.9296875, + -0.19140625, + -1.5, + 0.609375, + 1.375, + 0.9609375, + -3.3125, + -1.984375, + 2.703125, + -3.890625, + -0.93359375, + -1.421875, + 1.9375, + 2.0, + 1.234375, + 0.9453125, + -2.875, + 2.40625, + -0.8671875, + -2.125, + 0.49609375, + 1.8984375, + 0.546875, + -1.1796875, + -0.62109375, + 3.21875, + -0.55859375, + 0.1767578125, + 2.4375, + -2.984375, + -1.1953125, + 2.734375, + -0.7265625, + -1.234375, + 0.76171875, + 0.2294921875, + 1.953125, + 0.244140625, + -3.265625, + -0.2041015625, + -2.640625, + 0.185546875, + 2.15625, + -5.375, + 1.7421875, + -3.59375, + 0.55078125, + 0.07177734375, + 0.85546875, + 1.640625, + 0.63671875, + 0.478515625, + -0.71484375, + 1.40625, + -0.376953125, + -2.59375, + 0.396484375, + -5.21875, + -1.765625, + 2.171875, + -0.014404296875, + 0.69140625, + 1.0, + -0.1259765625, + -2.25, + 2.21875, + -0.0693359375, + -1.0, + 2.6875, + 2.96875, + -1.8828125, + -1.46875, + -1.9453125, + 4.09375, + 0.953125, + 0.77734375, + 0.73828125, + 0.01458740234375, + 2.4375, + -4.03125, + -1.515625, + -0.55859375, + -1.0859375, + 0.6328125, + 0.41015625, + 1.0, + 0.466796875, + 1.546875, + 0.9140625, + 1.5078125, + -1.1875, + 0.77734375, + -0.287109375, + -1.7734375, + 0.65625, + 0.15625, + -0.6171875, + 1.953125, + 2.84375, + 1.421875, + 0.65625, + -1.09375, + 2.78125, + -0.50390625, + -1.671875, + -1.9921875, + -1.2421875, + 0.53515625, + 1.0234375, + 
-0.1494140625, + -0.94140625, + -3.453125, + 2.203125, + 0.1376953125, + -1.171875, + 0.48046875, + 1.25, + -0.427734375, + 1.140625, + 0.419921875, + 0.2197265625, + 1.859375, + -0.75, + -0.921875, + -1.53125, + 0.201171875, + -3.421875, + -1.5390625, + -2.078125, + 3.625, + 0.91796875, + 0.35546875, + 4.0625, + 1.0078125, + 0.51171875, + -1.5078125, + -0.3359375, + 3.859375, + 0.671875, + 6.625, + 1.203125, + -0.49609375, + -0.93359375, + 0.86328125, + -2.59375, + -1.3203125, + 0.234375, + 1.828125, + 0.1650390625, + 1.21875, + -1.625, + 1.6953125, + -1.265625, + 1.3515625, + 0.43359375, + 1.2734375, + -1.0859375, + -2.25, + 1.265625, + -1.171875, + 0.48046875, + 1.078125, + -0.19921875, + -0.72265625, + -0.7890625, + 0.0498046875, + -1.6640625, + -2.53125, + -3.015625, + -2.265625, + 0.88671875, + -1.453125, + -5.90625, + -1.8515625, + -0.7578125, + 3.1875, + 2.6875, + -1.7734375, + -0.3984375, + 0.46875, + -2.21875, + -4.21875, + 2.953125, + 3.703125, + -2.875, + 2.203125, + -0.96875, + 0.25, + 0.96875, + -2.53125, + -1.4140625, + 3.71875, + -12.6875, + -0.126953125, + -1.546875, + 4.625, + 0.177734375, + -1.6015625, + 0.7734375, + -5.0, + 3.578125, + -1.9453125, + 2.578125, + -0.341796875, + -1.0703125, + 2.75, + 2.21875, + -4.34375, + 3.03125, + -2.03125, + 0.8046875, + -1.734375, + -0.9140625, + 0.1474609375, + 1.1328125, + 1.3515625, + 2.890625, + 7.03125, + -0.4453125, + -2.453125, + 1.0390625, + -2.21875, + -0.78125, + -1.5078125, + -2.109375, + -2.3125, + -1.796875, + -0.275390625, + -2.578125, + -1.046875, + 0.0556640625, + 0.515625, + 1.7578125, + 4.28125, + 0.64453125, + -1.1640625, + -0.94140625, + 0.349609375, + -1.9140625, + 2.890625, + 4.46875, + 0.5546875, + 1.453125, + 0.51171875, + -1.1015625, + 0.70703125, + -3.359375, + -2.0, + 0.2177734375, + 0.796875, + -0.59765625, + -2.453125, + 1.2265625, + -0.224609375, + 3.46875, + 1.46875, + 2.3125, + 1.59375, + 1.4609375, + 0.45703125, + -0.3515625, + -0.59765625, + -1.515625, + 0.2158203125, + 
-3.96875, + 1.65625, + 1.6796875, + 1.25, + 0.025390625, + 0.2236328125, + -0.404296875, + 3.125, + -0.94140625, + -2.109375, + 2.5625, + 2.140625, + -2.40625, + -1.203125, + -2.15625, + -1.0390625, + -2.9375, + -0.48046875, + 2.34375, + -1.46875, + -0.359375, + -0.875, + 1.6796875, + 1.6875, + -2.828125, + -1.5, + 6.8125, + 2.59375, + 1.7421875, + 2.703125, + 0.41015625, + 2.359375, + -2.21875, + -1.0546875, + 0.26171875, + 0.6640625, + -1.4453125, + -5.53125, + 0.76953125, + -0.32421875, + -5.9375, + 2.28125, + 0.92578125, + 4.34375, + -2.171875, + 0.4375, + 0.494140625, + -1.4375, + -1.1015625, + 1.09375, + 1.640625, + 3.109375, + -0.1923828125, + -0.1552734375, + -3.03125, + -0.498046875, + -0.2041015625, + -1.015625, + -3.75, + -0.91015625, + -0.69921875, + 2.15625, + 0.2734375, + -1.6640625, + 0.80078125, + 0.87109375, + 0.003936767578125, + 0.201171875, + -2.390625, + -0.2490234375, + 0.5546875, + -2.203125, + -1.625, + -1.2265625, + -1.953125, + 1.59375, + 2.796875, + -0.50390625, + 0.154296875, + 2.0625, + 1.8046875, + -0.361328125, + 1.203125, + -0.265625, + 0.2431640625, + -0.01287841796875, + -1.6640625, + -4.15625, + -0.83984375, + -0.28515625, + 0.126953125, + -1.5546875, + 1.71875, + -0.078125, + -2.6875, + -1.28125, + 3.53125, + 0.119140625, + -0.64453125, + 3.0625, + 0.31640625, + -1.0, + 2.8125, + -1.8671875, + -2.109375, + -0.734375, + 0.734375, + -4.46875, + -8.625, + 2.0, + -4.59375, + 1.46875, + -1.96875, + 2.46875, + 2.046875, + -2.015625, + 1.125, + -0.72265625, + 0.41015625, + -1.3125, + -2.421875, + -0.8046875, + 1.4453125, + 0.24609375, + -0.5859375, + 1.59375, + 1.8359375, + -1.203125, + -0.80859375, + 2.109375, + 1.8359375, + -2.765625, + -3.34375, + -0.9140625, + 0.91015625, + 5.0625, + 0.953125, + 4.9375, + -1.7578125, + 1.8359375, + -6.96875, + -0.640625, + 1.953125, + -1.4921875, + 2.71875, + -3.53125, + -2.46875, + 0.337890625, + 0.93359375, + 1.4296875, + -14.4375, + -1.609375, + 1.1640625, + 2.15625, + -0.0198974609375, + 
0.70703125, + 3.875, + 0.7265625, + 1.2734375, + -2.84375, + 4.3125, + -0.4296875, + 0.1328125, + -0.2734375, + -2.859375, + 1.75, + -1.140625, + -1.046875, + -2.03125, + -0.98828125, + 1.984375, + 2.765625, + 5.40625, + -0.10791015625, + -0.53125, + -1.8203125, + 0.32421875, + -1.609375, + -3.46875, + -0.10986328125, + 3.546875, + 6.15625, + 1.40625, + 6.375, + -1.640625, + 1.75, + 0.1416015625, + 0.8515625, + 0.5859375, + -3.203125, + 1.6484375, + 0.6171875, + -1.1015625, + 1.5625, + -2.0625, + 1.296875, + 0.5703125, + -1.1171875, + 2.0625, + -1.8828125, + -1.59375, + 2.65625, + 1.578125, + -0.87890625, + 1.390625, + 1.5546875, + 4.53125, + -4.8125, + -1.75, + -0.1015625, + -0.0849609375, + -0.94921875, + 2.296875, + -0.14453125, + 2.859375, + -0.90625, + -0.6953125, + -2.28125, + 0.36328125, + 3.125, + 0.59765625, + 2.453125, + 1.1171875, + 3.0625, + 0.0986328125, + 3.28125, + 2.6875, + -2.4375, + -3.328125, + -0.9375, + -0.328125, + 1.3515625, + 0.41796875, + 1.7265625, + -5.65625, + 0.38671875, + -2.5625, + -0.546875, + -0.054443359375, + -0.142578125, + -0.26171875, + -1.25, + 0.71484375, + 0.890625, + 0.5, + -1.5703125, + 0.65625, + -1.2109375, + -3.65625, + 0.68359375, + -1.5390625, + 1.90625, + 0.25, + 0.390625, + -1.1171875, + 1.5625, + 1.1953125, + -0.82421875, + -0.359375, + -0.80859375, + 2.9375, + -1.7734375, + 3.15625, + 1.421875, + 0.053466796875, + 1.78125, + -1.5234375, + -3.171875, + -0.14453125, + 2.1875, + 0.0225830078125, + -2.890625, + 0.1416015625, + -2.671875, + -1.9609375, + 1.4921875, + -2.96875, + -1.4609375, + 1.4609375, + -1.0234375, + 1.5390625, + -1.9375, + 1.6328125, + -3.3125, + 0.98046875, + -0.85546875, + 0.9453125, + 1.03125, + -1.5390625, + 1.6953125, + -1.2734375, + -0.828125, + 2.78125, + -0.8046875, + 0.5078125, + 0.12890625, + -0.921875, + 0.10888671875, + 1.9375, + 6.15625, + -1.1796875, + -0.9921875, + -0.6328125, + 2.640625, + -0.08056640625, + -0.365234375, + -0.33984375, + -0.703125, + 1.7265625, + 0.90625, + 
0.302734375, + 0.09619140625, + 1.234375, + 1.34375, + 4.5, + 0.6015625, + -1.4921875, + 3.171875, + -1.53125, + -0.4609375, + 5.4375, + 1.3671875, + 0.0751953125, + -0.58984375, + 1.8125, + -2.21875, + 0.318359375, + 0.984375, + 0.7734375, + 1.6953125, + 1.7734375, + 1.203125, + 1.8984375, + -0.37109375, + 1.1484375, + 0.5546875, + -0.08447265625, + -2.984375, + 1.6640625, + -1.6796875, + 2.359375, + -0.408203125, + -1.34375, + 5.25, + 1.3359375, + 1.1484375, + 0.08056640625, + 0.59765625, + -3.796875, + 0.251953125, + -0.341796875, + 2.8125, + -0.421875, + 2.875, + -1.7734375, + 0.034423828125, + -1.953125, + -2.078125 + ], + "index": 1, + "object": "embedding", + "raw_output": null + }, + { + "embedding": [ + 4.125, + 1.390625, + -4.40625, + 0.546875, + 2.640625, + -0.037109375, + -2.921875, + 0.75390625, + 6.0625, + 2.796875, + -5.875, + 2.953125, + -2.265625, + -0.79296875, + 5.96875, + 4.46875, + 0.921875, + -1.828125, + 0.98828125, + 1.1484375, + 1.9453125, + 4.875, + 2.875, + 0.98046875, + -1.828125, + 1.4765625, + -0.265625, + 0.30859375, + 0.36328125, + -2.421875, + -4.25, + -0.65234375, + 2.296875, + 3.34375, + -0.490234375, + 2.0625, + 1.0078125, + -0.765625, + 0.609375, + 1.03125, + 0.37109375, + 3.4375, + 2.953125, + 2.9375, + -1.734375, + 0.423828125, + -2.140625, + 2.390625, + -1.8125, + -3.640625, + -0.8203125, + -0.265625, + 2.828125, + 2.96875, + 0.53515625, + 2.265625, + -1.625, + -3.734375, + -3.328125, + -2.34375, + -1.6796875, + 1.5703125, + 2.03125, + -1.78125, + -0.302734375, + 0.8046875, + 0.322265625, + -1.4453125, + 0.6796875, + 2.140625, + 2.421875, + 2.6875, + -1.140625, + 3.390625, + 3.296875, + 0.62109375, + -0.83984375, + -0.322265625, + 0.53515625, + 3.8125, + -1.0234375, + -1.4609375, + 0.59375, + 2.78125, + -0.63671875, + -0.484375, + 1.2578125, + 0.345703125, + -0.7890625, + 2.859375, + -2.046875, + 2.5625, + -1.4921875, + 1.9375, + -1.375, + -6.5625, + 1.3359375, + -0.91015625, + 0.921875, + 1.71875, + 0.1435546875, + 
-1.5859375, + -3.203125, + 0.8671875, + -0.74609375, + 1.03125, + 0.150390625, + -2.328125, + 4.125, + -1.1796875, + -0.75390625, + 2.6875, + 1.1484375, + 2.265625, + -1.7421875, + -0.1669921875, + -0.4453125, + 1.0234375, + -2.265625, + 2.703125, + 5.34375, + 0.458984375, + 0.99609375, + 1.6328125, + -0.92578125, + -1.7421875, + 3.640625, + 1.2890625, + -2.375, + -0.2734375, + 1.8046875, + -1.578125, + -1.421875, + -2.09375, + -2.234375, + -0.53125, + 1.8515625, + -2.46875, + 3.15625, + -0.640625, + -1.6171875, + -1.0, + 1.21875, + -1.8671875, + -1.4765625, + 1.828125, + 0.55078125, + 1.3359375, + 2.25, + -0.63671875, + -0.369140625, + -0.78125, + -0.458984375, + 3.109375, + 0.9296875, + -5.96875, + 1.5625, + 1.046875, + 1.5703125, + 1.1796875, + -6.09375, + 0.1279296875, + -0.79296875, + -1.796875, + 1.7421875, + 0.12060546875, + 2.953125, + -3.15625, + 2.609375, + -1.84375, + -1.296875, + -0.23828125, + 0.494140625, + 0.08544921875, + -1.5, + -0.609375, + 0.9921875, + -1.1640625, + 3.890625, + 0.427734375, + -0.9765625, + 2.046875, + -1.2578125, + 2.140625, + -0.62890625, + 1.6171875, + -2.171875, + 1.6484375, + -3.234375, + -0.0830078125, + 1.2734375, + -0.376953125, + -2.609375, + -5.28125, + 2.0625, + 1.34375, + -0.00665283203125, + -2.28125, + 2.921875, + 2.328125, + 3.640625, + -5.15625, + 0.11279296875, + 1.609375, + -4.75, + -0.921875, + -1.203125, + 2.65625, + -0.5, + -1.40625, + 2.90625, + -1.8203125, + 3.6875, + 0.5625, + 2.359375, + 1.1328125, + 0.921875, + 0.890625, + -0.263671875, + 0.97265625, + 1.3828125, + 0.8828125, + -1.84375, + 0.86328125, + -1.921875, + 1.25, + -2.28125, + -2.15625, + -0.005828857421875, + 0.51171875, + 0.62109375, + 0.353515625, + -0.8984375, + -0.6171875, + 2.578125, + 3.90625, + -1.7421875, + 1.4140625, + 0.322265625, + -0.36328125, + -0.2099609375, + -1.2890625, + 1.7265625, + 1.15625, + -1.3046875, + -0.7421875, + 3.625, + -1.4375, + 1.109375, + 4.3125, + -1.8125, + -3.765625, + 0.208984375, + -0.796875, + -0.359375, + 
0.1328125, + 0.93359375, + 0.5625, + 0.1708984375, + 0.018310546875, + -1.046875, + -0.09912109375, + 2.046875, + -1.1171875, + -2.453125, + 1.5, + 0.150390625, + 1.890625, + 0.671875, + 0.9296875, + 1.0, + 0.08349609375, + 0.2490234375, + 3.015625, + -3.390625, + -0.81640625, + 2.828125, + 2.46875, + -3.4375, + 0.6875, + 2.46875, + 0.0615234375, + -0.92578125, + -3.796875, + -0.81640625, + -1.3984375, + -0.609375, + 1.25, + -1.3359375, + -0.380859375, + 0.74609375, + 0.25390625, + -1.0546875, + 0.490234375, + 2.15625, + 0.40234375, + 1.5625, + -4.28125, + -1.265625, + 0.8984375, + 1.609375, + 0.431640625, + -0.81640625, + -0.828125, + -1.578125, + 2.5, + 2.40625, + -0.93359375, + 1.9453125, + -2.625, + -0.69140625, + 0.396484375, + 0.50390625, + -1.2265625, + -0.5703125, + -2.9375, + 2.34375, + 0.98828125, + -1.640625, + -0.052001953125, + -0.56640625, + 0.1025390625, + 0.3671875, + 1.15625, + -0.37890625, + 1.2265625, + -0.50390625, + -1.125, + 0.98828125, + 3.109375, + -0.328125, + -1.3203125, + 1.171875, + 1.4453125, + -4.46875, + -1.4296875, + 2.71875, + -0.1923828125, + 4.625, + -0.431640625, + 1.3125, + -0.279296875, + 0.1357421875, + -1.296875, + 1.125, + -0.859375, + -0.53515625, + 2.703125, + 0.8984375, + 1.015625, + -0.341796875, + 0.263671875, + -5.65625, + -2.21875, + 0.7890625, + -2.984375, + -0.10888671875, + -0.26953125, + 1.3203125, + 1.7109375, + -0.84765625, + 0.2412109375, + -1.25, + -0.890625, + 1.421875, + 1.484375, + 1.765625, + 0.55078125, + -2.40625, + 0.8984375, + 3.625, + 0.466796875, + 0.44921875, + -2.0625, + -0.1884765625, + 1.1171875, + -4.875, + -0.875, + 0.33203125, + 0.87890625, + 0.6875, + -9.9375, + 0.076171875, + 2.546875, + -0.91796875, + 1.5078125, + -2.109375, + -6.21875, + 0.189453125, + 0.2431640625, + -2.046875, + -2.78125, + -1.5546875, + 0.3125, + 1.1484375, + 0.435546875, + 0.765625, + 0.376953125, + -0.408203125, + 0.6328125, + 1.1953125, + -2.078125, + 0.423828125, + -0.546875, + -1.7109375, + 0.71484375, + 
-0.83203125, + 0.76953125, + -2.5, + 0.578125, + -0.37109375, + -0.8203125, + 0.3984375, + -1.8203125, + 0.04736328125, + -1.3359375, + -1.4140625, + 0.69140625, + 3.140625, + 1.625, + 1.0859375, + -0.482421875, + -1.796875, + -0.8125, + -1.9140625, + -0.330078125, + -1.953125, + 0.380859375, + -0.89453125, + -1.3203125, + 0.291015625, + -0.41015625, + 0.40625, + 3.46875, + -2.40625, + -3.03125, + 2.515625, + 0.56640625, + -0.14453125, + 1.421875, + 1.2421875, + -1.9765625, + 1.9375, + -5.65625, + 1.28125, + -2.21875, + -2.96875, + -2.796875, + -0.5390625, + -1.2265625, + 2.75, + 1.890625, + 0.69921875, + -1.375, + -0.92578125, + -0.39453125, + 0.419921875, + -1.421875, + 0.2294921875, + 0.6875, + 2.25, + -1.921875, + 2.390625, + 0.48828125, + -0.81640625, + -3.3125, + -1.4140625, + -0.4453125, + -2.78125, + 1.046875, + 3.390625, + -0.640625, + -0.66796875, + -1.1171875, + 2.015625, + 2.703125, + -2.09375, + -2.015625, + 7.0, + -3.15625, + 0.171875, + -2.015625, + 2.375, + -0.58984375, + -0.89453125, + -0.59375, + -0.291015625, + 0.0, + 2.203125, + 2.1875, + 3.53125, + -2.953125, + 1.515625, + 0.1337890625, + 0.478515625, + -1.953125, + 0.98828125, + 1.2265625, + -0.0361328125, + -0.5703125, + 0.8046875, + -0.1455078125, + -2.125, + -2.5, + 1.4921875, + 5.59375, + -1.9765625, + 0.3203125, + -0.0118408203125, + -0.59375, + -0.294921875, + -0.298828125, + -2.359375, + -1.3984375, + -1.0390625, + -0.55859375, + -0.70703125, + -0.306640625, + -3.296875, + 1.4453125, + 2.53125, + -1.65625, + -0.11083984375, + 0.0054931640625, + 2.140625, + 0.049560546875, + -1.40625, + -1.8046875, + 2.8125, + 1.265625, + -0.0791015625, + 0.87890625, + 1.7109375, + -1.21875, + -1.359375, + 0.8359375, + -0.06103515625, + -0.75, + 1.0234375, + -0.875, + -2.703125, + 2.53125, + -0.349609375, + -0.359375, + -2.53125, + 2.109375, + -0.67578125, + -0.1611328125, + -1.4921875, + -4.28125, + 0.51953125, + 5.90625, + -0.0400390625, + -1.6328125, + -0.984375, + -0.78515625, + -1.859375, + 
-1.671875, + -0.73046875, + -0.004364013671875, + -2.078125, + 1.1953125, + -2.03125, + -1.203125, + 0.04638671875, + -0.453125, + -2.4375, + 0.55078125, + 1.828125, + -1.1640625, + -1.1640625, + -1.265625, + 0.07470703125, + 5.0, + -2.53125, + 0.189453125, + 2.109375, + 1.8984375, + 1.875, + -0.703125, + 1.21875, + -4.375, + 1.4296875, + 0.5234375, + -0.173828125, + -2.015625, + -0.9453125, + 0.421875, + -0.59765625, + 0.6875, + -1.0, + 1.3046875, + -2.171875, + -0.8125, + -0.8125, + -1.3515625, + 1.5390625, + 2.265625, + 2.765625, + 0.37890625, + -0.078125, + -3.34375, + 0.169921875, + -0.435546875, + 0.07421875, + 1.1328125, + -0.8671875, + -1.96875, + -1.78125, + 3.546875, + 2.015625, + -1.4765625, + 1.9453125, + -1.3984375, + 2.109375, + -5.9375, + 0.1337890625, + 1.1953125, + -0.017578125, + 0.88671875, + 1.7109375, + 0.15234375, + -3.890625, + 0.439453125, + 3.25, + -0.0595703125, + -0.80078125, + 2.078125, + 0.546875, + -2.9375, + 0.0166015625, + 1.2265625, + -0.2275390625, + -3.59375, + -1.125, + -0.6015625, + -3.078125, + 0.671875, + 3.21875, + 2.03125, + -3.390625, + 3.5625, + 0.75390625, + -1.734375, + 0.453125, + -1.71875, + 0.2265625, + 1.6796875, + 2.015625, + -2.609375, + 2.9375, + 2.078125, + -3.921875, + 0.46484375, + 3.515625, + 3.078125, + -0.94921875, + 0.03515625, + -3.140625, + 3.546875, + -2.3125, + 2.515625, + -2.96875, + -3.4375, + -0.609375, + -3.296875, + -0.1650390625, + 0.462890625, + 0.9453125, + -0.953125, + 0.7265625, + 2.828125, + -1.734375, + -0.2431640625, + 2.203125, + -0.322265625, + 2.15625, + -0.1982421875, + -0.1884765625, + 1.2578125, + -1.109375, + 0.2236328125, + -3.15625, + -1.8828125, + 0.9609375, + 0.515625, + 0.275390625, + 0.0869140625, + -2.484375, + 1.171875, + -4.65625, + -2.390625, + -1.453125, + -3.8125, + -0.29296875, + 0.99609375, + -2.34375, + -4.625, + 1.0078125, + 0.4140625, + -1.203125, + 0.8125, + -5.4375, + -6.5, + -0.99609375, + -2.546875, + 0.18359375, + -0.275390625, + -3.828125, + -1.4765625, + 
0.115234375, + -3.140625, + -1.8515625, + 0.859375, + 2.421875, + 1.1640625, + 5.625, + 2.6875, + -0.07080078125, + 0.06591796875, + 0.8203125, + 5.34375, + -1.96875, + 0.91796875, + 1.46875, + -1.046875, + -2.390625, + 1.4140625, + -0.29296875, + -2.5625, + -2.125, + -0.26953125, + -1.3515625, + 2.234375, + -1.625, + 2.984375, + -5.78125, + -0.185546875, + 1.3671875, + -2.34375, + -2.34375, + 0.70703125, + 0.26953125, + 0.609375, + 0.4140625, + -0.54296875, + 3.171875, + -5.84375, + 0.302734375, + 2.5625, + 1.0703125, + -1.296875, + -4.8125, + 2.71875, + 2.71875, + -2.84375, + 3.734375, + 1.3828125, + 1.6640625, + -3.515625, + 3.109375, + 2.5, + -0.16015625, + -3.125, + -0.98828125, + -1.2734375, + -0.81640625, + -0.27734375, + 1.5078125, + -2.53125, + -0.6953125, + -0.1982421875, + 0.55078125, + -1.1171875, + -0.65625, + -4.25, + -1.3125, + 1.1953125, + 0.365234375, + 0.69140625, + 0.208984375, + 0.421875, + 1.171875, + -0.6640625, + 0.58203125, + 1.0546875, + -3.484375, + -0.6953125, + -1.4140625, + 1.9921875, + 0.87890625, + -3.3125, + 0.59765625, + 0.4609375, + -1.484375, + 0.9296875, + -1.5703125, + -2.578125, + 3.890625, + 0.734375, + -2.921875, + 0.2236328125, + 0.8125, + 2.78125, + 3.0625, + -0.609375, + -3.078125, + -2.484375, + 1.4453125, + -1.828125, + -5.25, + 3.625, + -0.052490234375, + 2.65625, + 4.1875, + -2.71875, + 1.5390625, + -0.173828125, + 1.2109375, + -2.328125, + 0.90234375, + -0.0341796875, + 1.6796875, + -0.6875, + -0.70703125, + -1.9765625, + -0.439453125, + 1.75, + 1.2109375, + 3.671875, + 2.671875, + -2.53125, + 1.2890625, + 1.84375, + 0.85546875, + 1.1484375, + 2.1875, + -0.62890625, + -3.21875, + -0.87890625, + -1.2890625, + -0.408203125, + 1.9921875, + 0.66015625, + -1.0, + -1.0390625, + -2.953125, + -0.67578125, + -1.5625, + -0.392578125, + -1.8359375, + 0.6484375, + -0.71484375, + 2.890625, + -0.443359375, + -1.4453125, + 0.59765625, + -0.435546875, + -0.494140625, + 1.359375, + -0.859375, + 2.421875, + 0.26171875, + -5.3125, + 
0.6171875, + 0.94921875, + 0.22265625, + 2.65625, + 0.0888671875, + 1.109375, + 0.72265625, + -1.15625, + -0.76171875, + -1.6484375, + 2.078125, + -2.203125, + 2.3125, + -1.2265625, + -0.373046875, + -2.5625, + 0.296875, + -2.703125, + -1.8125, + -1.2734375, + 3.296875, + -1.8671875, + -1.2265625, + 1.671875, + -0.90625, + 0.515625, + -0.77734375, + -1.71875, + 0.72265625, + 0.609375, + -3.171875, + -1.421875, + -0.271484375, + -1.1640625, + 0.0615234375, + 0.48828125, + 1.0078125, + -1.859375, + -2.640625, + 2.5625, + -1.6015625, + -1.5859375, + 0.10595703125, + -2.03125, + -6.5625, + -0.185546875, + 1.7890625, + 3.890625, + -0.421875, + -0.04248046875, + -0.28515625, + 2.359375, + -1.25, + -1.0703125, + 1.125, + -0.490234375, + -0.181640625, + -3.234375, + -2.40625, + 0.578125, + 0.74609375, + -1.4609375, + -0.310546875, + 1.0078125, + -0.6875, + -0.5859375, + -2.8125, + -3.59375, + -0.70703125, + -1.4921875, + -1.828125, + 2.171875, + 0.609375, + 3.15625, + 0.6015625, + -1.0078125, + -0.2890625, + -5.71875, + 0.49609375, + 0.91796875, + 1.8046875, + 1.8203125, + -0.67578125, + 0.953125, + 1.1796875, + 0.26953125, + -0.5546875, + -3.1875, + -3.078125, + -0.03955078125, + 1.609375, + 1.0546875, + 0.37109375, + 2.484375, + 0.5546875, + -1.7734375, + 1.0859375, + -1.4296875, + 1.7109375, + -0.890625, + 0.51953125, + -0.271484375, + 1.09375, + -1.328125, + -1.0, + -0.734375, + -0.1953125, + 0.671875, + -1.3671875, + 0.79296875, + 2.03125, + 0.84765625, + 0.5078125, + -2.421875, + 1.6484375, + -0.26171875, + -1.4375, + -1.8125, + 1.7734375, + 1.0, + 0.014892578125, + 1.4609375, + -2.234375, + -1.6015625, + -2.09375, + 16.125, + -1.8203125, + 1.3046875, + -0.40625, + 1.0, + 2.84375, + -1.375, + 1.6484375, + -0.65625, + 0.478515625, + -2.796875, + 1.71875, + 3.375, + -1.2265625, + -2.46875, + 0.0247802734375, + -0.1728515625, + 2.34375, + -0.796875, + 1.3125, + 0.80078125, + -4.125, + -0.72265625, + 0.036376953125, + 1.203125, + -1.0546875, + -2.203125, + -1.4140625, + 
-1.1875, + -5.71875, + -2.921875, + 2.515625, + -1.296875, + -0.6171875, + -0.96484375, + -0.5234375, + -1.3828125, + -1.484375, + -2.484375, + -2.765625, + 0.298828125, + -1.7578125, + 0.171875, + 1.0703125, + -0.53125, + -0.53125, + -6.6875, + 2.484375, + -3.546875, + 2.109375, + 0.4921875, + -0.3828125, + -0.5234375, + 2.5625, + 1.1953125, + 0.400390625, + -0.96875, + -0.1376953125, + -2.140625, + 2.90625, + -0.427734375, + 3.203125, + 0.515625, + 3.609375, + -0.1318359375, + -0.404296875, + 0.3203125, + -0.044677734375, + 2.171875, + 2.4375, + 1.140625, + -1.9921875, + 2.28125, + 2.53125, + -0.1982421875, + -0.302734375, + 0.1572265625, + 1.2265625, + 0.92578125, + -2.921875, + 0.9609375, + 1.71875, + -2.53125, + 0.75, + 1.6640625, + 2.46875, + -2.375, + -0.8671875, + 0.054931640625, + 0.0615234375, + -0.11474609375, + -3.6875, + -0.75, + 2.859375, + -1.6328125, + 3.25, + 2.703125, + -1.1875, + -0.25, + 0.1904296875, + -1.796875, + -3.953125, + 1.1484375, + -2.171875, + 0.84375, + -2.515625, + -2.015625, + 1.9453125, + 0.1474609375, + 2.953125, + 1.96875, + 2.09375, + -0.234375, + 2.71875, + -0.6875, + 0.81640625, + -1.5234375, + 0.9296875, + 1.0078125, + -2.953125, + -0.0654296875, + 0.451171875, + 5.875, + 1.1640625, + -2.78125, + -6.71875, + 0.478515625, + -0.55859375, + 6.40625, + 0.58984375, + 0.345703125, + -4.6875, + 1.75, + 0.46875, + -0.11572265625, + -1.890625, + -1.7265625, + 2.15625, + -2.171875, + 0.1826171875, + -5.0625, + 1.265625, + 1.0625, + -1.78125, + 1.28125, + -0.380859375, + -0.4921875, + 1.1015625, + 1.1328125, + 1.53125, + -3.078125, + -0.189453125, + 3.140625, + -2.0, + -1.21875, + -0.67578125, + -0.478515625, + -0.28125, + -1.2421875, + 2.046875, + -2.3125, + -1.4453125, + -2.671875, + -1.65625, + 2.984375, + 1.15625, + 1.515625, + 2.578125, + 0.7578125, + 1.5703125, + 0.4765625, + -3.28125, + -1.0390625, + -1.765625, + 0.0224609375, + -1.296875, + 1.609375, + -0.453125, + 2.5625, + 3.75, + -0.1005859375, + 4.46875, + 0.447265625, + 
0.34375, + 1.125, + -0.298828125, + 1.328125, + -0.45703125, + 0.55078125, + 3.46875, + 2.5625, + 2.34375, + -1.6015625, + 1.1171875, + 2.28125, + 0.28125, + -1.5546875, + -3.25, + -0.6328125, + -4.59375, + 0.16015625, + -2.015625, + 1.4453125, + 0.318359375, + -0.71875, + 0.88671875, + -1.8671875, + -2.796875, + 0.0201416015625, + 0.10791015625, + 1.015625, + 0.66015625, + 2.796875, + -1.671875, + -2.03125, + -0.1123046875, + -0.8671875, + 1.46875, + -1.8359375, + 0.078125, + -0.8671875, + -3.96875, + -0.58984375, + 1.2421875, + -0.515625, + -3.046875, + 0.07861328125, + 1.7578125, + -0.1826171875, + 0.71875, + 1.4453125, + -2.875, + -0.07666015625, + 8.9375, + 1.7578125, + 2.265625, + 1.8203125, + 0.5234375, + 2.28125, + -0.408203125, + 3.5, + 0.84375, + 1.515625, + 2.8125, + -2.765625, + -1.125, + -3.140625, + -0.2373046875, + 0.0498046875, + 0.74609375, + -4.34375, + -1.3515625, + -0.25390625, + -2.859375, + 1.765625, + -2.0, + -1.59375, + -0.07568359375, + -0.56640625, + -0.890625, + -3.78125, + -1.03125, + -2.875, + -0.5, + -1.4375, + 0.51171875, + 0.07861328125, + -0.75390625, + -0.271484375, + -1.5625, + 1.734375, + 0.1328125, + -0.87890625, + -0.66796875, + 3.140625, + 2.421875, + 0.26171875, + -0.8046875, + -0.380859375, + -1.71875, + -3.546875, + 2.96875, + 0.58203125, + 1.1796875, + 1.8515625, + -1.8046875, + 2.53125, + -0.474609375, + -0.48828125, + 1.484375, + 0.365234375, + 0.80078125, + -1.8046875, + 0.1767578125, + -2.828125, + 4.5, + 1.515625, + 0.283203125, + 1.421875, + 1.15625, + 1.8046875, + 1.3828125, + -2.6875, + 1.03125, + 3.3125, + 3.15625, + -1.484375, + 0.037841796875, + 3.03125, + 1.984375, + 0.65234375, + 2.78125, + 0.1455078125, + 0.85546875, + 2.296875, + -2.171875, + 1.75, + 0.240234375, + 2.09375, + -1.171875, + -2.796875, + 0.396484375, + 0.73046875, + 2.796875, + -4.34375, + 0.72265625, + -0.1796875, + 3.1875, + -1.046875, + -1.4921875, + 2.4375, + -0.58203125, + -2.234375, + 1.0703125, + -1.59375, + 0.9609375, + 0.38671875, + 
-2.03125, + 1.0078125, + 1.359375, + 1.2265625, + -0.423828125, + 0.2470703125, + -2.6875, + 4.21875, + -3.015625, + 2.15625, + -0.357421875, + -1.4765625, + 0.1005859375, + 2.703125, + -0.73046875, + -0.875, + -0.408203125, + 3.421875, + -3.578125, + 0.6328125, + 3.21875, + -2.6875, + 4.15625, + 1.3515625, + -5.46875, + -1.203125, + 0.447265625, + 5.53125, + 1.078125, + 4.0, + -0.55078125, + -2.046875, + -3.0, + -0.15234375, + -7.125, + -2.453125, + 1.9140625, + -3.46875, + 2.046875, + 1.7109375, + 0.68359375, + 5.625, + -2.0625, + -1.265625, + 1.0546875, + -4.09375, + 1.3671875, + -0.671875, + -1.2265625, + -2.75, + -1.75, + 2.671875, + -1.4140625, + -0.58984375, + 1.5234375, + -1.5078125, + -0.7265625, + -0.953125, + 0.33203125, + -2.328125, + 2.734375, + -0.21484375, + -1.7734375, + 3.953125, + 1.171875, + -0.228515625, + -3.59375, + 0.82421875, + 0.68359375, + 0.921875, + 0.267578125, + -0.9765625, + -0.68359375, + -1.625, + 1.4296875, + -0.44921875, + -0.423828125, + 2.59375, + -2.234375, + -0.8046875, + -0.6875, + -0.10693359375, + -1.875, + -1.625, + 1.15625, + -0.85546875, + 1.140625, + -1.3046875, + -0.466796875, + -0.244140625, + -0.435546875, + -1.7421875, + 0.53515625, + -3.734375, + -1.1484375, + -1.0703125, + 4.0, + 1.46875, + 0.1728515625, + 0.578125, + -2.578125, + -4.15625, + 0.8046875, + -1.0859375, + 0.22265625, + 0.51171875, + 0.2353515625, + -1.5625, + -2.234375, + 3.421875, + 1.5625, + 1.7578125, + -1.515625, + 2.65625, + -0.62890625, + -3.5625, + 2.625, + -7.59375, + 0.455078125, + -1.3984375, + 0.9375, + 0.890625, + 2.34375, + -5.34375, + 4.09375, + -0.64453125, + 2.140625, + -3.375, + -3.59375, + -0.51953125, + -1.25, + -0.1865234375, + -0.62890625, + 0.1953125, + -2.703125, + -0.41015625, + -0.6953125, + -1.2109375, + 0.462890625, + -2.203125, + -2.6875, + -1.21875, + 0.1923828125, + -2.125, + -0.302734375, + 1.4765625, + -0.734375, + 1.015625, + 1.2734375, + -1.3984375, + 2.203125, + 0.451171875, + -3.078125, + 0.6015625, + 0.39453125, + 
1.828125, + 2.09375, + 1.65625, + -3.984375, + 2.578125, + -1.953125, + -0.455078125, + 1.4609375, + 0.072265625, + 1.140625, + -0.578125, + -0.365234375, + -2.609375, + 0.94140625, + -0.21484375, + -2.359375, + 1.1171875, + -2.453125, + 1.1015625, + -2.109375, + 3.296875, + -0.859375, + 3.515625, + 0.7890625, + 1.7109375, + -2.875, + 1.78125, + 2.078125, + -0.43359375, + -2.09375, + 0.0264892578125, + 1.4609375, + -1.6328125, + -4.59375, + 2.25, + -0.62109375, + -0.53125, + -3.671875, + 0.035400390625, + -1.5390625, + 0.1572265625, + -2.578125, + -5.46875, + 1.3203125, + -3.90625, + 0.7578125, + -1.078125, + -0.006103515625, + -0.71875, + 0.310546875, + 1.7421875, + 0.8359375, + 2.8125, + 0.3125, + 2.40625, + 2.25, + -0.609375, + 0.80078125, + -0.625, + 0.2333984375, + -2.09375, + -1.09375, + -3.84375, + -2.4375, + -0.23828125, + -1.7265625, + -0.361328125, + 3.0625, + -1.7265625, + -2.03125, + 0.92578125, + -0.78125, + 0.9765625, + -2.796875, + -1.5546875, + -0.349609375, + -2.9375, + -1.0234375, + -0.60546875, + 0.392578125, + -0.6484375, + 0.4609375, + 0.3125, + -1.125, + -0.6953125, + -2.265625, + 1.7734375, + 3.09375, + 2.953125, + -0.5234375, + 2.671875, + 2.578125, + 0.294921875, + -0.8046875, + 0.2431640625, + 0.291015625, + -0.796875, + -0.482421875, + 3.34375, + 1.3359375, + -0.498046875, + 1.7421875, + 2.765625, + -5.46875, + 2.03125, + 1.5625, + -2.21875, + 5.625, + 2.421875, + 1.9921875, + 1.1796875, + -0.30078125, + 3.515625, + -3.140625, + -1.1640625, + -0.64453125, + 2.140625, + -0.7265625, + -0.01165771484375, + 2.484375, + -0.296875, + -1.9296875, + 0.76171875, + -0.1552734375, + -1.921875, + -4.34375, + 1.3203125, + 1.7109375, + -0.326171875, + 3.203125, + 0.2578125, + 0.3359375, + -0.7578125, + 2.90625, + 4.28125, + 0.203125, + -0.2392578125, + -1.40625, + 0.119140625, + -0.333984375, + 2.4375, + -0.8984375, + 1.21875, + 2.328125, + 1.25, + -0.53515625, + -2.109375, + 0.57421875, + 0.494140625, + -0.9140625, + 1.4453125, + -2.390625, + 
2.453125, + 3.34375, + -1.3984375, + -3.390625, + -1.6953125, + -0.87109375, + 2.75, + 0.427734375, + -0.8671875, + 1.640625, + 1.6640625, + -3.03125, + 2.4375, + -0.8046875, + 0.310546875, + 0.8671875, + -0.60546875, + -3.109375, + -0.134765625, + 1.046875, + -1.515625, + -3.421875, + 0.85546875, + 2.640625, + 0.4375, + -1.28125, + -3.828125, + 3.296875, + 2.328125, + 0.046630859375, + 2.46875, + -3.03125, + 0.7734375, + -1.546875, + -0.359375, + -1.8046875, + 1.046875, + -2.640625, + -0.8671875, + -2.34375, + -1.2734375, + 2.703125, + -2.546875, + 1.2265625, + 0.49609375, + 1.28125, + 3.703125, + -0.2392578125, + 2.0625, + -0.310546875, + 2.9375, + -0.8671875, + 2.9375, + -0.484375, + 2.046875, + 2.390625, + 3.59375, + 2.625, + 0.51953125, + -1.9765625, + 0.66015625, + -0.357421875, + 1.90625, + 0.26953125, + -1.734375, + 2.125, + -2.765625, + -1.1171875, + 1.203125, + 1.671875, + -1.65625, + -3.90625, + 0.119140625, + -2.75, + -0.65234375, + 1.2734375, + 1.0234375, + 1.484375, + -0.404296875, + 1.28125, + -0.94140625, + 0.921875, + 0.875, + -0.90625, + -2.4375, + 2.75, + -0.5390625, + -0.12353515625, + 1.015625, + 0.423828125, + -3.71875, + 1.3671875, + 1.109375, + 2.328125, + -2.125, + 2.40625, + 0.494140625, + -2.515625, + -1.40625, + -1.2109375, + -1.25, + -0.859375, + -3.8125, + -3.84375, + -0.09033203125, + 0.73828125, + 1.0546875, + 0.078125, + 0.55859375, + 2.640625, + 1.9453125, + -1.109375, + -1.0546875, + 1.0625, + -3.515625, + -1.4453125, + 0.70703125, + -1.40625, + 0.97265625, + 0.036865234375, + -0.859375, + 1.2890625, + -2.359375, + -1.8984375, + -1.5390625, + -2.078125, + -0.87109375, + 3.03125, + 0.58984375, + -4.15625, + -0.6328125, + 1.1796875, + 1.828125, + -3.359375, + -1.21875, + 1.421875, + -0.859375, + 2.0625, + 0.7421875, + -0.6328125, + -2.625, + -1.78125, + -0.78515625, + 1.4453125, + 3.765625, + 0.35546875, + -2.40625, + -1.046875, + -0.6640625, + -0.1767578125, + -11.625, + 2.5625, + 1.515625, + 0.341796875, + -1.1484375, + -2.46875, 
+ 0.1875, + -2.0625, + 0.53125, + -1.6640625, + -0.921875, + -6.09375, + 2.390625, + 1.984375, + 2.875, + -0.09521484375, + 1.5546875, + -1.40625, + -1.3125, + 3.671875, + -0.369140625, + -1.4765625, + -0.52734375, + -0.1103515625, + -0.80078125, + 1.421875, + 1.515625, + -1.8671875, + 1.6875, + -4.71875, + -5.28125, + -1.5703125, + 2.78125, + 0.63671875, + 0.431640625, + -1.9296875, + -1.5234375, + -3.515625, + 1.03125, + 2.015625, + 0.0732421875, + 1.2578125, + -0.216796875, + 0.423828125, + 0.7109375, + -2.09375, + -5.03125, + -0.68359375, + 1.359375, + 0.62109375, + -0.62890625, + -2.703125, + 2.84375, + 3.234375, + -3.484375, + -0.0002880096435546875, + -1.65625, + -0.52734375, + 0.03564453125, + -0.8984375, + 0.369140625, + -3.671875, + -0.328125, + 3.609375, + 0.193359375, + -0.045654296875, + 0.85546875, + -0.369140625, + 0.7421875, + -0.71875, + 0.07470703125, + -0.06201171875, + 1.0703125, + 2.359375, + -1.6640625, + 0.65625, + 1.203125, + 1.046875, + 1.84375, + 0.76953125, + 0.6015625, + 1.09375, + -1.6796875, + -1.7265625, + -0.41796875, + 3.34375, + -3.046875, + 1.453125, + 1.8515625, + -0.38671875, + 2.203125, + 0.42578125, + -0.1357421875, + 3.6875, + -0.75390625, + 1.7109375, + -1.2421875, + 1.4140625, + -1.8515625, + -1.40625, + -0.275390625, + 0.65234375, + -1.625, + -0.255859375, + -0.025634765625, + -0.625, + -1.7109375, + -1.7578125, + -0.625, + -1.6796875, + -1.15625, + -0.9921875, + -1.6484375, + 0.059814453125, + 0.2099609375, + -0.98046875, + -1.6953125, + -3.296875, + -0.142578125, + -1.2109375, + -0.1123046875, + -2.25, + -0.4140625, + 1.6875, + -2.171875, + -0.72265625, + -1.1875, + -0.9765625, + -6.40625, + -1.015625, + 3.515625, + 0.0135498046875, + -0.703125, + 1.171875, + -0.1923828125, + -2.203125, + -0.8984375, + -2.390625, + 3.109375, + 2.109375, + 0.306640625, + -2.796875, + -1.9375, + 0.04150390625, + -1.5546875, + 0.61328125, + 0.052978515625, + -1.5078125, + -0.169921875, + -0.1015625, + -3.296875, + -0.05078125, + -1.9609375, 
+ -0.859375, + 2.4375, + 0.12158203125, + 0.85546875, + -2.921875, + -0.03369140625, + 1.3203125, + -1.390625, + 1.1328125, + -0.84765625, + 3.4375, + 1.0, + 1.359375, + 1.1640625, + -0.1416015625, + 1.8515625, + -0.2060546875, + 0.56640625, + -0.0118408203125, + 0.6328125, + -0.05224609375, + 2.203125, + 0.29296875, + 3.921875, + -0.86328125, + -1.828125, + -0.52734375, + 2.109375, + -2.546875, + -1.5390625, + -0.10302734375, + -1.2734375, + 3.59375, + 0.83203125, + -1.109375, + -1.3828125, + -2.15625, + -2.78125, + -0.2265625, + 2.171875, + 2.859375, + 0.6015625, + 1.0078125, + -1.0234375, + 0.0, + -4.21875, + 1.9609375, + -2.0625, + -0.8359375, + -1.359375, + -2.734375, + -1.390625, + 2.453125, + -0.953125, + 3.59375, + -0.98828125, + 0.60546875, + 2.15625, + 0.4609375, + -0.423828125, + -2.984375, + 1.2890625, + -0.1259765625, + 1.2734375, + -0.006103515625, + -0.9375, + 0.859375, + -2.875, + -0.68359375, + 0.06201171875, + 0.7109375, + -0.97265625, + 2.03125, + 1.4140625, + 2.84375, + 0.80078125, + 0.81640625, + -0.65234375, + 0.62890625, + 4.9375, + 0.96484375, + -0.5859375, + 1.34375, + -1.390625, + 0.67578125, + 2.109375, + 0.294921875, + 2.15625, + -0.453125, + -0.97265625, + -2.53125, + 0.5078125, + -1.5625, + -1.4375, + 3.296875, + -2.21875, + 2.0, + -0.79296875, + 0.8671875, + 0.4453125, + 0.72265625, + 0.96484375, + -0.134765625, + -1.9609375, + -0.1787109375, + 0.671875, + 2.125, + -2.890625, + -0.0361328125, + 0.6640625, + 1.046875, + -0.96875, + -4.21875, + 0.0, + 0.08154296875, + 1.1640625, + 1.03125, + 0.7578125, + 0.95703125, + -1.4921875, + -0.66015625, + 0.0185546875, + 1.1953125, + 4.28125, + -0.1611328125, + -0.44921875, + -2.765625, + -0.87890625, + -0.251953125, + 0.578125, + -3.421875, + 2.65625, + -0.2041015625, + -4.75, + 1.234375, + -1.5859375, + 1.34375, + 1.84375, + 0.54296875, + 2.921875, + 0.59765625, + -0.2333984375, + 5.34375, + 1.90625, + -1.6328125, + -1.1328125, + 1.2578125, + -1.0390625, + 0.142578125, + -3.078125, + 
-0.365234375, + -3.296875, + -0.63671875, + -1.96875, + -0.93359375, + 2.21875, + 2.03125, + -2.15625, + -0.216796875, + 2.046875, + -1.0234375, + -2.21875, + 0.177734375, + -5.34375, + 1.140625, + 2.6875, + 1.5859375, + -3.171875, + 1.9765625, + -0.1494140625, + -1.171875, + 0.2060546875, + 2.625, + 0.53515625, + -1.0703125, + -5.15625, + -0.494140625, + -1.1328125, + 1.7265625, + 1.8359375, + 1.234375, + 3.15625, + -3.59375, + 1.6015625, + -0.244140625, + -1.8515625, + -0.94140625, + 2.03125, + 2.859375, + -0.365234375, + 2.765625, + 1.75, + 2.4375, + -0.74609375, + 1.3359375, + 1.53125, + 0.451171875, + 0.484375, + 1.515625, + -0.357421875, + -2.140625, + 2.609375, + -0.90234375, + 0.9375, + 0.5546875, + 3.046875, + -1.328125, + 0.9921875, + -1.4296875, + 1.1015625, + -3.421875, + 0.10302734375, + -1.109375, + -0.09521484375, + 0.173828125, + 0.251953125, + -1.3203125, + -1.0078125, + 0.0615234375, + -0.4296875, + -0.7109375, + 1.671875, + 9.3125, + -2.359375, + -3.84375, + -0.76953125, + 1.765625, + -0.92578125, + 0.291015625, + 0.578125, + -0.87890625, + -3.015625, + 0.99609375, + -0.29296875, + -10.25, + 0.056884765625, + -0.81640625, + 0.54296875, + 0.890625, + -0.921875, + 1.109375, + 2.890625, + -0.2099609375, + -1.125, + 2.078125, + -1.09375, + 1.0078125, + -0.62109375, + 0.64453125, + 1.9765625, + 3.34375, + -1.796875, + 0.69140625, + -1.1015625, + -3.921875, + 2.6875, + 0.388671875, + -1.703125, + 0.337890625, + -0.37890625, + -0.95703125, + -0.3671875, + -0.64453125, + 0.73046875, + 2.21875, + -0.3515625, + -7.78125, + 1.5859375, + 2.15625, + 4.75, + 2.5, + 2.171875, + -0.6875, + 0.4296875, + -1.7109375, + -0.06689453125, + -1.7109375, + -1.09375, + 2.609375, + -0.23828125, + 1.28125, + -3.625, + 5.90625, + 1.953125, + 1.3046875, + 1.7421875, + -1.8359375, + 0.88671875, + 0.376953125, + -2.1875, + -0.38671875, + -1.1640625, + -2.296875, + 2.09375, + -0.84375, + 2.375, + 1.6484375, + 2.25, + -2.34375, + -0.4765625, + -0.7421875, + -1.5625, + 0.56640625, 
+ 0.8828125, + 0.609375, + -0.64453125, + 2.34375, + 3.65625, + -2.0, + -2.03125, + 1.4453125, + 0.6875, + -2.171875, + -0.734375, + -1.953125, + -1.4765625, + -2.0, + 4.84375, + 1.09375, + -1.0390625, + 1.96875, + 1.0859375, + -1.0546875, + 0.28125, + -0.283203125, + -0.26171875, + 3.84375, + -0.14453125, + 2.796875, + -0.34375, + 4.15625, + -1.78125, + 0.035400390625, + -1.0703125, + -2.09375, + 0.8046875, + -3.03125, + 2.421875, + 0.140625, + -1.75, + -0.0, + -0.8984375, + -2.109375, + -2.296875, + -0.62109375, + -1.828125, + -1.671875, + -0.5078125, + 1.3203125, + 0.59765625, + 3.625, + -2.375, + 1.6953125, + 3.5, + 2.34375, + 2.453125, + -1.7109375, + -0.0245361328125, + -0.82421875, + 2.59375, + 0.1357421875, + 3.890625, + 1.8046875, + 2.375, + 1.2109375, + 0.1328125, + 0.3984375, + -2.40625, + -1.875, + 1.46875, + 2.125, + -1.0, + -2.234375, + -0.306640625, + -4.6875, + 0.404296875, + -2.09375, + 0.65625, + 0.458984375, + 1.03125, + 4.65625, + 0.9609375, + -0.859375, + -1.6875, + 4.03125, + 1.2890625, + 1.109375, + 0.5234375, + -1.1953125, + 2.109375, + -1.5234375, + 1.453125, + -1.71875, + 2.515625, + 1.140625, + -1.0859375, + 1.3515625, + 0.734375, + 0.9921875, + 1.65625, + 3.078125, + -1.421875, + 0.2177734375, + -3.46875, + -2.390625, + 2.3125, + -2.125, + 0.609375, + -3.46875, + -0.484375, + -1.6328125, + -1.578125, + -2.0, + 1.0078125, + -2.171875, + -0.43359375, + 1.2734375, + -0.396484375, + 3.921875, + 0.1005859375, + -2.71875, + 3.3125, + -4.34375, + 0.98828125, + 1.7109375, + 3.171875, + -1.8515625, + -2.765625, + 0.4453125, + 0.314453125, + -0.431640625, + -0.482421875, + -3.765625, + 1.671875, + 2.828125, + 2.015625, + 1.09375, + 0.0859375, + 2.15625, + -5.125, + 1.0078125, + -2.515625, + 0.59765625, + 3.3125, + 2.75, + -0.9765625, + -0.029541015625, + -2.34375, + 1.5859375, + 3.0, + 0.5234375, + -1.7578125, + -1.265625, + -0.01226806640625, + 1.796875, + 0.0059814453125, + 2.453125, + 3.984375, + -0.267578125, + 1.3671875, + 2.15625, + 
-2.234375, + -2.390625, + -0.890625, + 2.28125, + 4.125, + 1.3515625, + -0.63671875, + 1.4765625, + -1.1328125, + 2.6875, + 0.1953125, + 1.40625, + 0.78125, + 2.34375, + 1.1640625, + 3.1875, + 1.171875, + 1.421875, + 0.8984375, + -0.462890625, + -2.578125, + -4.03125, + -0.76171875, + -1.4296875, + 0.2421875, + 1.5703125, + 0.91015625, + -1.453125, + -2.71875, + 1.7890625, + 1.0, + 3.328125, + -1.40625, + -0.65234375, + -1.9296875, + 0.388671875, + -0.107421875, + -0.265625, + 0.498046875, + 0.25390625, + 0.58203125, + 2.03125, + -1.2265625, + 2.53125, + -3.0, + 0.251953125, + -1.0078125, + 1.4296875, + 0.1982421875, + -0.1435546875, + 0.6328125, + -0.44140625, + -1.7421875, + -1.46875, + -0.30078125, + -0.173828125, + 1.328125, + 2.0, + 4.6875, + 3.765625, + 0.130859375, + -2.234375, + 1.203125, + -1.6015625, + -1.375, + -2.09375, + 2.109375, + -3.015625, + -1.5390625, + 0.40234375, + 0.94921875, + 0.96875, + -1.65625, + -4.96875, + 2.25, + -0.181640625, + -1.7109375, + -4.09375, + 1.609375, + 0.41796875, + 1.5546875, + -0.84375, + 0.2109375, + 1.734375, + -0.4296875, + 1.78125, + 1.7109375, + 2.390625, + 0.75390625, + 2.734375, + -1.625, + -2.234375, + 0.81640625, + -2.5625, + 0.56640625, + -1.3515625, + -1.7421875, + -1.390625, + -0.62109375, + -1.5, + -0.81640625, + -1.265625, + -0.03125, + 0.16015625, + -0.62890625, + 0.07763671875, + 1.8515625, + -1.2890625, + 0.38671875, + 0.478515625, + -0.703125, + -0.9296875, + -0.1708984375, + 4.0625, + -0.76953125, + -0.69140625, + -5.03125, + -3.59375, + -0.25, + 3.4375, + 2.78125, + 0.45703125, + 0.7421875, + 1.3125, + 2.25, + 1.3828125, + -0.9765625, + -0.64453125, + -5.5, + -0.93359375, + -0.1650390625, + -0.94140625, + 2.875, + 0.70703125, + 1.640625, + -1.2578125, + 1.8359375, + -0.10791015625, + 0.0157470703125, + -1.125, + 1.8828125, + -2.546875, + -0.81640625, + 1.5859375, + -0.56640625, + 0.85546875, + -1.34375, + -0.0556640625, + -0.859375, + 0.75, + -5.28125, + -0.828125, + 1.7421875, + -0.828125, + 
-1.9921875, + -5.1875, + -1.640625, + 0.275390625, + -1.8828125, + 1.3203125, + 0.57421875, + -0.59375, + 1.109375, + 1.2578125, + -2.609375, + 1.0703125, + -2.015625, + 0.71875, + -3.328125, + -0.4375, + -1.765625, + -2.078125, + -0.82421875, + -1.4453125, + -0.94140625, + -5.125, + 2.859375, + 0.71484375, + -3.3125, + -0.486328125, + -2.921875, + -3.46875, + 7.125, + 0.55859375, + 1.203125, + 6.6875, + 0.0771484375, + 4.40625, + -0.27734375, + 2.4375, + -0.181640625, + -1.5234375, + -0.5, + -3.4375, + -0.69921875, + -1.3515625, + 1.9609375, + -2.796875, + -2.1875, + -0.44921875, + -0.73828125, + -1.609375, + -1.5546875, + -1.546875, + 0.953125, + -1.296875, + 1.9296875, + -1.9140625, + 1.859375, + 0.0849609375, + -0.375, + 2.984375, + 2.296875, + -0.09765625, + 0.81640625, + 1.4921875, + -2.453125, + -0.095703125, + -0.609375, + 0.6328125, + -0.2099609375, + 0.51953125, + -0.703125, + 1.1328125, + -0.10693359375, + -1.7578125, + -0.42578125, + 1.625, + 0.49609375, + 1.7734375, + -0.796875, + 1.609375, + -1.0859375, + -0.4765625, + -0.640625, + 0.8671875, + -1.34375, + -1.0859375, + -2.046875, + 1.5859375, + -2.078125, + -1.6171875, + -1.015625, + 1.0078125, + 0.0, + 1.3203125, + -2.625, + 0.984375, + -2.25, + 0.765625, + 1.6953125, + -0.103515625, + -1.8671875, + 0.30859375, + 0.6796875, + 2.703125, + -1.890625, + -1.4375, + -1.09375, + 0.984375, + -0.498046875, + 0.263671875, + -0.76953125, + 0.97265625, + 3.328125, + -3.5625, + -0.5703125, + -1.890625, + -2.828125, + -0.328125, + -0.984375, + 3.234375, + -1.6484375, + 0.59375, + -1.5625, + -2.265625, + 2.015625, + -0.1484375, + 1.3125, + 0.78125, + -2.75, + 2.03125, + 2.5, + -0.88671875, + 0.984375, + 1.15625, + -0.26171875, + -1.4375, + 2.0625, + 1.3046875, + -2.46875, + -0.62109375, + -1.25, + -1.1640625, + -0.080078125, + 1.015625, + 3.875, + -1.96875, + 0.306640625, + 0.265625, + -0.0274658203125, + -0.71875, + 1.515625, + -0.1298828125, + 1.921875, + -0.76171875, + -1.2421875, + 0.8671875, + 0.83984375, + 
2.328125, + -1.6484375, + -2.65625, + -6.1875, + -1.203125, + 1.7109375, + -0.041015625, + -2.078125, + -1.4921875, + -0.54296875, + -1.0625, + 0.57421875, + 1.4765625, + -1.71875, + 1.078125, + 1.421875, + 2.484375, + 1.46875, + -1.234375, + 0.498046875, + -1.71875, + 0.1640625, + 0.6171875, + 0.055419921875, + 1.53125, + -0.94921875, + -0.265625, + 3.3125, + -2.1875, + -0.047607421875, + 0.671875, + -0.2392578125, + -1.0703125, + -2.09375, + 0.55078125, + 0.53515625, + -2.46875, + 0.8828125, + -5.8125, + -1.328125, + 2.375, + -0.65234375, + -1.078125, + 0.77734375, + -0.412109375, + -0.83984375, + 1.125, + 0.451171875, + -1.515625, + 0.4921875, + -0.58203125, + -2.0625, + -1.28125, + -1.0234375, + -0.80078125, + 1.203125, + 0.8984375, + -0.53125, + -0.71875, + 1.5234375, + 0.74609375, + -0.63671875, + 1.2890625, + -0.27734375, + 0.0022735595703125, + -3.796875, + 1.375, + 0.88671875, + 0.12060546875, + 2.125, + 0.71875, + -2.984375, + -0.1787109375, + 1.0078125, + -0.5390625, + 2.34375, + -1.5546875, + -1.3671875, + -1.53125, + 1.375, + 0.337890625, + 2.046875, + 2.765625, + 0.65234375, + -3.25, + -2.359375, + 0.9453125, + -0.8984375, + -1.7578125, + 0.70703125, + -2.484375, + -0.036376953125, + 0.91015625, + -2.21875, + 1.140625, + -0.62890625, + -1.0625, + -1.7578125, + -3.3125, + 0.1787109375, + 0.47265625, + -2.46875, + 0.271484375, + -0.10546875, + 0.7578125, + -0.84375, + 1.875, + -3.734375, + -2.859375, + 1.390625, + -2.125, + -1.8515625, + 3.125, + -2.125, + -1.6640625, + 0.423828125, + 1.0390625, + 0.96484375, + -2.359375, + -0.1708984375, + -0.380859375, + 0.57421875, + 0.03564453125, + 3.125, + -1.28125, + 1.53125, + -0.357421875, + -0.67578125, + -2.34375, + -2.796875, + 0.275390625, + -0.8046875, + 0.91015625, + 1.0859375, + -2.75, + 6.75, + -0.392578125, + -1.328125, + 1.9765625, + -3.84375, + 2.1875, + -0.271484375, + -1.078125, + 1.4609375, + 1.640625, + 0.061279296875, + 1.6796875, + -1.15625, + -0.5, + -0.408203125, + 1.015625, + -0.07861328125, 
+ -1.2265625, + 2.890625, + -2.0, + -3.4375, + 3.453125, + 1.0, + -0.045654296875, + -3.78125, + -2.5625, + -0.41796875, + -1.125, + 2.984375, + 0.2275390625, + 0.416015625, + -0.1513671875, + 0.322265625, + 1.4296875, + 3.21875, + 4.0625, + -2.890625, + 1.5390625, + -0.6953125, + 0.011962890625, + 3.03125, + 0.236328125, + 2.046875, + -1.3515625, + -0.796875, + -1.5078125, + -1.8046875, + -0.1552734375, + 1.5, + 1.953125, + 1.4765625, + 4.15625, + -1.8046875, + 3.25, + -1.25, + -1.6015625, + -0.419921875, + -1.3203125, + -3.953125, + 1.6484375, + 0.73828125, + -1.28125, + -2.078125, + 1.515625, + 1.625, + -0.6953125, + -0.091796875, + -1.0390625, + -0.91015625, + 0.1455078125, + -4.71875, + 0.84375, + -1.8671875, + -0.85546875, + 0.4765625, + 1.578125, + -0.1083984375, + 0.53515625, + 4.34375, + -3.59375, + 0.71875, + 0.921875, + -0.71484375, + -0.6484375, + 0.1181640625, + 0.201171875, + 1.7734375, + 0.68359375, + 1.1875, + 0.92578125, + 1.640625, + -1.59375, + -0.60546875, + -4.4375, + -1.6328125, + -2.484375, + -0.72265625, + -1.9140625, + 3.0, + 2.234375, + 4.0625, + 1.515625, + -2.53125, + -1.59375, + -4.375, + -2.15625, + -0.00775146484375, + 3.578125, + 1.25, + 0.11279296875, + 4.28125, + -5.53125, + -3.265625, + 1.03125, + -1.5625, + -0.412109375, + -1.296875, + 0.310546875, + 1.4375, + 2.78125, + -1.7890625, + -0.78515625, + 0.68359375, + -6.21875, + 0.55859375, + 2.34375, + -2.546875, + 0.9140625, + -0.84765625, + 2.578125, + -0.078125, + 0.416015625, + 2.359375, + 1.8671875, + -0.9140625, + -0.8046875, + 2.765625, + -0.26953125, + -1.7109375, + 3.296875, + -1.1171875, + 1.734375, + 5.0625, + -1.3515625, + -0.12890625, + -2.21875, + 1.53125, + -0.2431640625, + -0.291015625, + -3.296875, + 1.875, + -1.34375, + -0.3359375, + -2.0625, + 3.34375, + -1.546875, + -6.78125, + 1.421875, + -0.6484375, + 1.46875, + -1.2578125, + 0.169921875, + -0.158203125, + -1.140625, + 0.158203125, + -0.220703125, + -1.3984375, + 1.3203125, + -1.4765625, + 1.6328125, + -3.9375, 
+ 1.09375, + 2.078125, + 0.62890625, + 0.5703125, + -2.609375, + -2.109375, + -0.427734375, + 0.75, + -2.140625, + -0.515625, + 4.375, + -2.125, + 0.484375, + -0.58984375, + -1.90625, + -3.0625, + -2.015625, + -2.25, + 0.85546875, + -1.3125, + -1.3515625, + 1.0078125, + 0.1201171875, + -0.2158203125, + 0.8984375, + -4.03125, + -1.4609375, + -3.6875, + -2.984375, + -0.244140625, + 2.34375, + -1.9765625, + 0.8046875, + 1.046875, + 1.8515625, + 0.78125, + 2.296875, + -3.46875, + 2.484375, + 1.5703125, + -0.006103515625, + 2.953125, + 0.53125, + 2.09375, + 3.015625, + -3.859375, + -4.78125, + -4.375, + -2.015625, + 0.365234375, + 0.9296875, + -2.609375, + -0.66796875, + -2.203125, + 0.921875, + -2.96875, + -0.390625, + 2.09375, + -1.046875, + -1.5, + 2.203125, + -2.046875, + 1.109375, + -0.06103515625, + -4.25, + 1.328125, + -1.671875, + 0.5546875, + 3.546875, + 0.9375, + -2.171875, + -0.7734375, + 0.2734375, + 0.81640625, + -0.87890625, + -5.71875, + -0.875, + 0.01220703125, + -2.5, + -1.234375, + 2.9375, + -4.625, + 1.3046875, + 0.443359375, + 0.55078125, + 2.53125, + -0.341796875, + -2.875, + 1.578125, + 0.26953125, + -1.9765625, + 4.875, + 0.91015625, + 0.2578125, + 3.96875, + -2.296875, + -0.1396484375, + -4.625, + 1.3125, + 0.51953125, + -3.40625, + -0.5546875, + -1.3671875, + -0.380859375, + -2.84375, + 1.5078125, + 0.005828857421875, + 0.890625, + -2.015625, + -0.58203125, + -3.3125, + -0.26171875, + 0.328125, + -0.0184326171875, + 3.984375, + -0.89453125, + -2.671875, + 0.33203125, + -0.0213623046875, + 0.8046875, + 2.734375, + 0.7890625, + -1.0, + 1.5078125, + -3.03125, + 4.09375, + 0.25, + -3.140625, + -0.875, + -2.234375, + -1.546875, + 0.37890625, + 0.005950927734375, + -1.4140625, + -4.0, + -0.390625, + -0.1962890625, + 0.09033203125, + 2.375, + -0.314453125, + -0.67578125, + -0.5234375, + -1.7421875, + -2.15625, + -1.2109375, + -2.375, + 0.71484375, + -0.6953125, + 0.24609375, + -5.21875, + 0.80078125, + 0.9453125, + -3.484375, + -0.2236328125, + 
1.671875, + -2.453125, + 1.2265625, + -13.6875, + 0.06982421875, + -3.484375, + -0.283203125, + 2.015625, + 0.416015625, + -2.34375, + -0.482421875, + 0.478515625, + 0.578125, + 4.6875, + -2.21875, + 3.765625, + 2.234375, + 2.609375, + -5.3125, + -0.486328125, + 0.51953125, + -2.125, + 0.271484375, + -2.265625, + -0.314453125, + 0.84375, + 0.55078125, + 0.494140625, + -4.96875, + -2.59375, + 1.7578125, + -0.6875, + 3.921875, + 1.0546875, + 1.0, + -1.6484375, + 1.5078125, + 1.765625, + -0.35546875, + 0.1357421875, + 0.375, + -0.1982421875, + 1.2734375, + 0.80078125, + 2.421875, + 1.75, + -1.515625, + -0.51953125, + 0.59375, + -1.4609375, + 1.9296875, + 0.431640625, + 2.03125, + 0.64453125, + 1.671875, + -0.43359375, + 4.0625, + 0.466796875, + -1.953125, + -0.1552734375, + -1.734375, + 2.140625, + 2.5, + 0.91796875, + -2.234375, + 1.0859375, + 0.3359375, + -0.1884765625, + 1.4609375, + -1.1953125, + -1.5390625, + -2.171875, + 2.40625, + 2.875, + -1.2265625, + -4.40625, + -0.9609375, + 3.46875, + -1.3828125, + 2.40625, + -1.4609375, + 1.09375, + 0.3203125, + -6.75, + 2.203125, + 1.78125, + -0.7734375, + -0.76953125, + -1.328125, + 5.34375, + -0.78515625, + -1.2734375, + -1.46875, + -1.296875, + -1.421875, + 2.125, + 0.7421875, + -1.984375, + 3.171875, + 0.498046875, + -0.478515625, + -0.62109375, + 2.9375, + -0.2333984375, + -1.9453125, + -2.390625, + -1.09375, + -0.5703125, + 1.78125, + 0.74609375, + -3.015625, + 1.25, + -0.84375, + 1.3984375, + 4.71875, + 0.72265625, + -2.765625, + 3.8125, + -3.640625, + 2.109375, + -2.921875, + -0.0869140625, + -0.15625, + -3.28125, + -3.734375, + 0.76171875, + -2.03125, + 2.40625, + -1.6796875, + 2.015625, + -1.25, + -0.56640625, + 4.375, + 0.9609375, + 2.375, + -1.34375, + 0.146484375, + -4.59375, + 2.09375, + -0.01202392578125, + -0.0849609375, + -2.078125, + 0.3671875, + -1.59375, + 2.328125, + -2.1875, + -0.353515625, + -3.40625, + 0.326171875, + -1.703125, + 1.75, + -0.58984375, + 2.421875, + -0.034912109375, + 0.6640625, + 
1.4921875, + -0.609375, + 1.359375, + 2.796875, + 1.0, + -2.375, + -1.625, + -0.80859375, + 1.71875, + -2.78125, + -4.4375, + 1.3828125, + -0.578125, + -0.035400390625, + 0.74609375, + -2.8125, + -0.96875, + -0.46484375, + -1.3515625, + 0.53125, + 0.173828125, + 3.421875, + 0.060546875, + 0.6640625, + 0.6875, + 1.796875, + -2.5625, + -0.83203125, + -2.484375, + 0.87109375, + -0.65234375, + 2.40625, + 1.734375, + -1.7578125, + 1.1796875, + 1.890625, + 1.1171875, + 0.609375, + 2.125, + 2.328125, + 4.375, + 0.1708984375, + -1.359375, + -0.63671875, + -2.265625, + 0.31640625, + 1.65625, + -1.2109375, + -1.78125, + 1.28125, + 0.421875, + 0.8515625, + 2.640625, + 0.515625, + 1.296875, + -3.546875, + 0.72265625, + -4.84375, + -0.83984375, + 2.734375, + -1.875, + -1.5546875, + 1.0859375, + 2.734375, + -0.8828125, + -0.34375, + 2.34375, + -0.421875, + -1.4453125, + -2.453125, + 3.375, + -1.015625, + 0.6015625, + 0.482421875, + -1.515625, + 0.58203125, + -0.76953125, + -2.59375, + 2.40625, + 1.4140625, + -2.421875, + 0.73828125, + 0.73046875, + 1.0703125, + -1.0390625, + -1.3125, + -3.609375, + 4.5625, + -0.8984375, + -0.6640625, + -2.03125, + 1.671875, + -1.0234375, + -0.71875, + -3.96875, + 1.2421875, + 0.357421875, + 0.326171875, + -1.875, + -1.7265625, + -1.546875, + -0.2470703125, + 2.0, + 2.5, + -0.609375, + -2.21875, + -1.0234375, + 4.71875, + -0.333984375, + -0.6796875, + -1.328125, + -1.3359375, + 0.3203125, + 0.4453125, + -0.3203125, + 0.396484375, + -3.515625, + 0.98046875, + 2.234375, + 2.984375, + 0.40625, + 0.91796875, + 1.3046875, + 0.875, + -2.328125, + -1.7265625, + -0.9140625, + -1.0390625, + 1.5, + 2.84375, + 1.2109375, + 0.60546875, + 4.21875, + -1.21875, + 0.5390625, + -0.039794921875, + 1.578125, + 2.046875, + 1.8359375, + -0.96484375, + -0.138671875, + -0.51171875, + 4.34375, + 0.52734375, + -2.546875, + -2.015625, + 1.046875, + 2.46875, + -0.07958984375, + 15.4375, + 3.125, + -0.197265625, + 0.2890625, + 1.171875, + -1.8828125, + -1.5546875, + 
-0.6875, + 1.5625, + 1.0, + -2.5625, + 1.90625, + -1.25, + -5.25, + 2.796875, + -3.390625, + 3.203125, + 2.1875, + 5.5, + -0.09521484375, + -2.859375, + -0.59765625, + -2.1875, + 2.59375, + 0.58984375, + 2.703125, + -4.375, + 0.640625, + -7.9375, + 2.34375, + 0.21875, + -0.279296875, + -1.6953125, + -3.84375, + -1.625, + -2.640625, + -2.53125, + -1.1953125, + 3.1875, + 0.87109375, + 0.232421875, + 1.7578125, + 1.1328125, + -0.6171875, + 0.1513671875, + 0.8984375, + -3.171875, + 0.62109375, + -0.890625, + 0.189453125, + 0.9375, + 1.3515625, + 3.234375, + 0.4609375, + 0.279296875, + -0.392578125, + 1.7421875, + 2.328125, + -0.79296875, + 0.5546875, + -2.453125, + -2.140625, + -3.390625, + 1.890625, + -0.96484375, + 2.734375, + -2.515625, + 0.85546875, + -2.421875, + 2.265625, + -0.81640625, + 1.4140625, + 1.8125, + 0.68359375, + -3.96875, + -1.1015625, + -2.21875, + -2.046875, + -0.25, + 0.53515625, + 0.5703125, + 0.75, + -1.171875, + -0.37109375, + -0.73828125, + -0.8984375, + 1.5546875, + 0.98046875, + 2.28125, + 0.546875, + -0.0257568359375, + 2.71875, + 1.796875, + -2.0, + -0.640625, + -2.609375, + -1.6171875, + -1.6953125, + -3.59375, + 0.10107421875, + 0.142578125, + 1.296875, + -0.22265625, + 0.46484375, + 2.3125, + -0.99609375, + -1.5625, + -2.875, + -2.015625, + -2.15625, + 0.2314453125, + -1.203125, + 2.6875, + 2.625, + -0.216796875, + 1.375, + 0.030517578125, + 2.359375, + 1.2734375, + -3.421875, + 1.3828125, + 1.9140625, + -0.94921875, + -0.6875, + 3.3125, + -1.3984375, + 0.50390625, + 0.26953125, + 2.59375, + -0.84375, + 3.1875, + 1.96875, + 1.609375, + 3.859375, + 0.76953125, + 3.078125, + 2.375, + -4.53125, + 0.984375, + -2.5, + -0.6796875, + 3.796875, + 2.875, + -0.97265625, + -0.50390625, + -0.86328125, + 0.55859375, + -0.2119140625, + 1.8984375, + -1.984375, + -1.171875, + 0.5859375, + -3.6875, + 5.5, + -0.1591796875, + 1.7265625, + -0.375, + 1.03125, + -1.1640625, + 1.2890625, + -0.314453125, + -0.90625, + 0.83984375, + -1.0078125, + -0.8359375, + 
-0.4375, + 3.359375, + 0.703125, + -1.15625, + 3.265625, + -0.234375, + -0.349609375, + -2.890625, + -2.734375, + 2.375, + -1.421875, + -4.4375, + 1.2890625, + 0.1513671875, + -1.859375, + -1.203125, + -2.09375, + -1.9765625, + 0.84765625, + 1.7578125, + 1.3984375, + 0.796875, + 5.34375, + -0.65625, + 2.0, + -3.03125, + -1.328125, + -2.109375, + -1.1953125, + 0.0, + -0.279296875, + -12.25, + -0.490234375, + 0.5546875, + 2.921875, + 2.703125, + -1.8046875, + 3.46875, + 0.91015625, + -3.46875, + -3.640625, + 1.859375, + -3.625, + 0.2333984375, + 1.03125, + -0.45703125, + 1.8359375, + -0.08642578125, + -2.59375, + 3.203125, + -0.51171875, + -2.34375, + 3.125, + -2.609375, + 0.1240234375, + -4.78125, + 0.31640625, + -1.0078125, + 2.234375, + -0.82421875, + -4.71875, + 0.3203125, + -0.130859375, + -2.96875, + -1.6640625, + -0.94921875, + -0.1474609375, + -2.171875, + -1.46875, + 0.60546875, + 1.671875, + -3.421875, + -0.7421875, + 1.4140625, + 1.703125, + -0.4765625, + 1.2421875, + -0.64453125, + 4.21875, + -2.171875, + -0.365234375, + 1.78125, + 0.6171875, + -0.8671875, + 0.4140625, + -2.21875, + -1.625, + 0.515625, + 0.26171875, + -0.58984375, + -1.5703125, + 0.251953125, + 0.5625, + -0.609375, + -0.97265625, + 0.01165771484375, + -3.03125, + 0.80859375, + 4.0, + 0.83984375, + 0.0181884765625, + -1.234375, + 1.4609375, + -1.4609375, + -1.8125, + -0.15625, + 1.484375, + -0.94921875, + 1.015625, + 1.46875, + 2.78125, + -1.96875, + -0.036865234375, + 2.5, + -1.109375, + 0.036865234375, + -4.71875, + -4.40625, + 0.109375, + -0.3984375, + 3.171875, + 0.1982421875, + 0.294921875, + -1.203125, + 2.96875, + 2.796875, + 1.15625, + 0.765625, + 2.890625, + 3.0, + 1.09375, + 3.09375, + 1.6015625, + -1.421875, + 1.4375, + -0.1748046875, + -2.15625, + -0.66796875, + 1.359375, + 0.609375, + -8.3125, + -2.453125, + 0.82421875, + -0.1435546875, + 2.921875, + -8.6875, + -0.453125, + 1.5703125, + -0.62890625, + -0.255859375, + 2.578125, + 0.33984375, + 0.0732421875, + -2.46875, + 
-0.197265625, + -3.03125, + 0.353515625, + -0.66796875, + 1.2578125, + 1.65625, + -0.92578125, + -1.1484375, + -2.203125, + 0.8046875, + -0.6875, + -0.392578125, + -0.06396484375, + -2.453125, + 1.640625, + -0.98828125, + 1.3125, + 0.173828125, + -0.84375, + 1.0390625, + 1.1015625, + 2.03125, + -1.9140625, + -5.0625, + -0.10693359375, + -4.5625, + -2.671875, + 0.95703125, + 3.390625, + 3.671875, + -0.37109375, + -0.5234375, + 1.078125, + 0.0166015625, + -3.46875, + -1.890625, + -0.2060546875, + -0.98828125, + -0.6875, + -0.3203125, + 1.4375, + 1.625, + 1.09375, + -3.0625, + 0.8984375, + 1.5703125, + 0.80078125, + 0.59375, + 1.6796875, + -1.4921875, + -1.7734375, + 0.88671875, + -0.255859375, + -3.171875, + 4.0, + 0.59375, + 2.984375, + -4.5, + 3.078125, + 1.046875, + -1.09375, + -0.51171875, + -0.828125, + 0.169921875, + -2.828125, + 1.625, + -0.65234375, + -1.3046875, + -1.8203125, + -2.65625, + -1.65625, + -3.734375, + 0.2001953125, + -1.5859375, + 0.7734375, + 1.8359375, + 1.84375, + 0.197265625, + -0.578125, + -0.76171875, + 0.369140625, + -1.890625, + -0.9140625, + -1.1328125, + -2.203125, + -1.8203125, + 2.0, + -2.1875, + -2.375, + -6.90625, + -0.244140625, + 1.1015625, + 0.306640625, + 1.1328125, + 2.5625, + -0.64453125, + -0.55078125, + -1.015625, + -1.578125, + -12.1875, + 0.63671875, + -0.357421875, + -2.3125, + -2.28125, + -2.859375, + 3.53125, + 0.259765625, + 3.484375, + -0.50390625, + 2.140625, + -0.384765625, + 0.90625, + -0.2041015625, + -0.057861328125, + 1.6796875, + -2.359375, + 0.5078125, + -1.4453125, + 0.609375, + -2.9375, + 1.0703125, + 4.21875, + -0.734375, + 0.5859375, + -3.09375, + 1.0, + 1.46875, + 1.75, + 0.4375, + 1.1796875, + 3.5, + 0.6875, + 8.6875, + -1.015625, + 1.25, + -0.1806640625, + -2.875, + -0.2275390625, + 3.125, + 1.6328125, + 1.140625, + -0.578125, + 2.28125, + -3.953125, + 0.294921875, + -0.271484375, + 0.2412109375, + 0.98046875, + 3.125, + -2.09375, + -0.82421875, + -0.369140625, + -1.9140625, + 0.310546875, + 
0.341796875, + -0.369140625, + 1.96875, + -1.234375, + -0.875, + 0.55078125, + -1.59375, + 0.494140625, + 0.3359375, + -2.546875, + 1.875, + -1.703125, + 4.65625, + -1.234375, + 0.0556640625, + 4.78125, + 1.4140625, + -1.734375, + 3.5625, + 1.4609375, + 0.87890625, + 0.5703125, + 1.296875, + 0.890625, + 1.671875, + -0.45703125, + 2.390625, + -4.8125, + -1.359375, + -4.375, + -2.609375, + -5.1875, + -1.09375, + -1.953125, + -2.296875, + 2.765625, + 2.359375, + 3.34375, + 2.171875, + 2.234375, + -2.421875, + -0.7578125, + -1.1796875, + -2.140625, + 3.703125, + -0.8125, + -2.75, + 0.408203125, + 2.890625, + -1.0, + 1.1328125, + 1.3515625, + 1.3828125, + 3.296875, + 2.140625, + 0.45703125, + 0.4609375, + 3.046875, + -0.0201416015625, + 1.671875, + -1.9296875, + 1.0234375, + -0.859375, + -0.208984375, + 2.78125, + 1.359375, + -1.328125, + 0.337890625, + -1.03125, + -2.125, + -1.34375, + -2.859375, + -2.15625, + 3.015625, + -0.224609375, + 1.7578125, + -2.34375, + 2.609375, + -2.296875, + 0.984375, + 0.640625, + 1.6015625, + 1.671875, + -5.90625, + 1.8203125, + -1.0390625, + 2.609375, + 2.078125, + -1.859375, + -0.984375, + 0.064453125, + -0.89453125, + 0.49609375, + 2.53125, + 7.9375, + 0.02490234375, + 1.65625, + 2.828125, + -2.015625, + -1.3359375, + 2.328125, + 0.734375, + -1.0625, + 1.125, + 1.6171875, + 1.171875, + -0.330078125, + -3.0, + 3.546875, + 1.4375, + 1.21875, + -2.359375, + -2.640625, + -0.86328125, + 1.03125, + 1.4921875, + -4.15625, + -0.91015625, + -1.640625, + -2.53125, + 0.75, + 1.046875, + 0.50390625, + 0.380859375, + -1.421875, + 1.328125, + -0.74609375, + 4.3125, + 0.36328125, + 4.5625, + -0.033935546875, + 0.67578125, + 0.22265625, + 2.71875, + -0.6171875, + -0.337890625, + 1.515625, + -1.8046875, + 5.375, + 1.2421875, + 0.09521484375, + 3.3125, + 2.6875, + -4.1875, + -1.4921875, + -3.734375, + -0.60546875, + 1.84375, + -0.5859375, + -2.359375, + -0.671875, + -2.421875, + -2.625 + ], + "index": 2, + "object": "embedding", + "raw_output": null + 
}, + { + "embedding": [ + 2.34375, + 7.625, + -2.21875, + 0.55078125, + 4.5, + -0.2001953125, + -2.796875, + -5.5, + 4.6875, + -1.328125, + -3.984375, + 2.96875, + 2.484375, + -0.6640625, + -0.7109375, + 0.6015625, + 0.494140625, + -1.265625, + 1.0625, + 1.515625, + -1.4921875, + 7.53125, + -2.703125, + -0.267578125, + -0.6640625, + 3.375, + -1.9921875, + -1.1640625, + -1.515625, + -3.6875, + -2.46875, + -0.76171875, + 1.109375, + 3.984375, + -2.5, + 3.140625, + -0.8984375, + 2.484375, + -1.484375, + -0.78125, + 1.78125, + -1.4453125, + 1.0, + 0.78515625, + -2.609375, + 0.369140625, + -0.0203857421875, + 0.71875, + 0.76171875, + -0.51953125, + -0.7734375, + -1.265625, + -1.25, + -0.01220703125, + 1.9609375, + -1.515625, + -2.921875, + -2.671875, + 2.578125, + -0.94140625, + 1.0859375, + 0.6953125, + -0.1328125, + 1.8359375, + -0.7890625, + 3.09375, + 0.2265625, + 0.76953125, + -2.53125, + -1.5703125, + 1.859375, + 0.396484375, + -1.796875, + -0.058349609375, + -0.267578125, + -2.296875, + -2.296875, + 0.78515625, + 3.890625, + 1.6875, + 2.40625, + -2.109375, + -1.875, + -0.7421875, + -0.4765625, + -1.6875, + -2.8125, + -0.83203125, + -2.125, + -0.255859375, + -2.8125, + 1.4375, + -1.796875, + 2.765625, + 0.8125, + -6.125, + 2.15625, + 1.9609375, + 1.125, + -0.173828125, + -1.015625, + -0.2216796875, + -2.203125, + 2.28125, + 1.4765625, + -0.22265625, + -1.3515625, + -1.2734375, + 0.359375, + -3.203125, + 0.1123046875, + 2.5625, + -2.21875, + 1.703125, + 0.427734375, + 2.921875, + 0.9140625, + -1.2421875, + 1.65625, + 2.328125, + 2.09375, + 0.34375, + -1.84375, + -0.78515625, + 0.201171875, + -1.6484375, + 0.341796875, + 0.1240234375, + 1.0703125, + -2.71875, + -2.96875, + -1.234375, + 1.2890625, + 2.140625, + -4.59375, + -1.890625, + -0.73828125, + -2.5, + -1.515625, + 0.62890625, + -1.734375, + -2.59375, + 3.15625, + -1.453125, + -0.53515625, + 0.181640625, + 4.25, + 2.546875, + 3.828125, + 1.3203125, + -0.1181640625, + -0.75390625, + -0.123046875, + 3.015625, + 
0.318359375, + -0.0947265625, + -0.37109375, + 3.03125, + -2.4375, + 2.84375, + -2.90625, + 1.265625, + 1.078125, + -0.28515625, + -0.43359375, + -1.2890625, + -2.375, + -1.203125, + 5.09375, + -1.6953125, + -2.125, + 0.275390625, + 0.73046875, + 3.390625, + 1.7890625, + -1.5234375, + 1.828125, + -0.138671875, + 0.58984375, + -0.138671875, + 3.34375, + 4.46875, + -3.78125, + -0.35546875, + -0.84375, + 0.57421875, + -2.125, + 1.8046875, + -1.9375, + -1.0703125, + -0.474609375, + -2.078125, + -0.31640625, + 1.65625, + 3.59375, + -2.84375, + -0.69921875, + 0.65625, + -0.5546875, + 5.6875, + 1.65625, + -4.34375, + -0.70703125, + 0.263671875, + -3.09375, + -0.8984375, + 1.078125, + 0.83203125, + -1.3125, + -1.296875, + 1.625, + -2.625, + 0.1318359375, + -0.57421875, + 0.224609375, + -0.306640625, + -0.4375, + 3.421875, + -1.9296875, + -1.0703125, + 0.91015625, + 2.046875, + 0.1650390625, + 2.453125, + 1.65625, + 5.9375, + -2.078125, + 1.0546875, + 0.48828125, + 0.45703125, + 2.125, + 0.279296875, + -1.7734375, + 0.0908203125, + 8.6875, + -1.34375, + -0.84765625, + 1.5703125, + 1.609375, + -0.369140625, + 0.2470703125, + 2.453125, + 0.032958984375, + 0.07568359375, + 3.03125, + -1.359375, + 0.294921875, + -3.046875, + 2.859375, + 1.71875, + -4.375, + 1.65625, + 2.5, + -0.796875, + 0.201171875, + 2.890625, + 1.375, + 0.033447265625, + -0.671875, + 1.0, + 0.1103515625, + -3.609375, + 0.84375, + -4.09375, + 1.3515625, + -1.7421875, + -0.0869140625, + -1.796875, + 1.5234375, + 0.59375, + 0.1396484375, + -0.7578125, + -0.373046875, + 1.578125, + -2.875, + 1.4921875, + -0.0439453125, + -2.609375, + -1.9765625, + 0.78515625, + 0.64453125, + -1.0234375, + -0.07177734375, + -5.03125, + 2.71875, + 2.515625, + 0.1025390625, + 0.546875, + -2.15625, + -2.03125, + 2.1875, + -3.625, + -0.765625, + -1.46875, + 1.625, + -4.5, + -0.494140625, + -2.53125, + -0.306640625, + 1.15625, + 1.953125, + 0.5625, + -0.96484375, + 2.234375, + 0.56640625, + -2.296875, + 3.25, + -0.359375, + 0.953125, 
+ 0.4375, + 0.08544921875, + -1.2734375, + 0.345703125, + 1.3671875, + -0.59765625, + -1.4375, + -1.234375, + 0.48828125, + -0.83203125, + 0.00109100341796875, + 1.6015625, + 0.333984375, + 0.55078125, + -0.248046875, + 3.59375, + 0.388671875, + 0.64453125, + 0.78125, + -1.1015625, + 0.66015625, + -3.609375, + -3.640625, + -1.5390625, + 0.90234375, + -1.0859375, + 0.1298828125, + -0.1259765625, + -0.357421875, + 5.3125, + 0.80078125, + 3.625, + -0.236328125, + 1.0234375, + -0.03466796875, + -0.244140625, + -0.0185546875, + -0.81640625, + 1.96875, + -2.625, + 1.1796875, + -3.890625, + 1.2578125, + -1.390625, + 0.4453125, + 1.2890625, + -1.5, + -1.2265625, + 0.291015625, + 1.59375, + -0.89453125, + -4.0, + 0.75390625, + 1.234375, + -0.93359375, + -0.310546875, + 1.671875, + -0.73828125, + 0.1962890625, + -2.75, + -0.90625, + -0.16796875, + 0.71875, + 2.078125, + -1.9140625, + 2.140625, + 1.8359375, + -3.734375, + -0.234375, + -0.171875, + -1.6953125, + 1.7578125, + -1.2578125, + -1.3828125, + -2.015625, + -0.10205078125, + 1.9140625, + 2.46875, + -2.515625, + -1.6015625, + 1.03125, + 3.21875, + -3.46875, + -1.7265625, + 1.34375, + -0.36328125, + 1.0078125, + -0.2158203125, + -1.78125, + 2.21875, + 0.72265625, + 1.9453125, + -0.46875, + -1.109375, + -0.279296875, + -3.3125, + -0.92578125, + 0.2890625, + -0.9765625, + -1.09375, + 2.78125, + 1.5546875, + 0.0576171875, + 2.1875, + -6.03125, + 0.93359375, + -0.75390625, + -1.0703125, + 4.1875, + 1.046875, + -1.1640625, + -1.828125, + -0.65625, + 1.78125, + 0.75, + 0.1962890625, + 0.78515625, + 1.5546875, + 3.609375, + -2.515625, + -1.171875, + 0.416015625, + -0.75, + 0.2392578125, + 2.328125, + 0.69921875, + 0.02099609375, + -0.380859375, + -1.0, + -2.796875, + 0.1875, + -1.4375, + 0.30078125, + 1.140625, + -1.4375, + -1.984375, + -1.0625, + 0.890625, + -2.640625, + 0.55859375, + 1.5703125, + 2.046875, + 0.4453125, + -0.359375, + -0.8984375, + -1.359375, + 2.15625, + -0.4296875, + -0.77734375, + -0.546875, + -2.296875, + 
-0.2890625, + -2.1875, + -1.6171875, + -1.9140625, + 3.203125, + 1.1484375, + 2.578125, + 1.5390625, + -0.474609375, + -0.51171875, + 1.921875, + -5.03125, + -1.4921875, + -0.62890625, + 0.306640625, + -2.546875, + -3.625, + -0.55859375, + 0.75, + -6.9375, + 3.46875, + -2.234375, + -1.078125, + -0.87890625, + 1.0703125, + 2.515625, + -2.15625, + 0.08642578125, + 1.1875, + -0.76953125, + 0.26171875, + -1.1015625, + 0.66796875, + -0.859375, + 1.453125, + -1.4375, + 1.7421875, + 0.515625, + 0.72265625, + -0.80078125, + 0.1376953125, + 3.625, + 1.3828125, + 0.52734375, + 1.2578125, + 0.255859375, + -0.62890625, + 0.95703125, + -1.7421875, + -1.3046875, + 2.4375, + -3.59375, + -3.734375, + -2.5625, + 0.4375, + 1.5625, + 0.33984375, + -1.859375, + -1.296875, + 3.6875, + -0.302734375, + 3.765625, + -1.3828125, + 1.8125, + -0.7109375, + 0.91796875, + 2.6875, + -1.078125, + 0.671875, + 0.953125, + 1.0078125, + 1.1328125, + 0.79296875, + 1.515625, + 0.26171875, + -0.412109375, + 2.625, + -0.578125, + 2.28125, + 0.93359375, + 1.2265625, + 0.76171875, + 2.203125, + 2.59375, + -2.484375, + 2.015625, + -0.3359375, + 2.90625, + 0.1220703125, + -3.765625, + 1.2734375, + 1.5, + -2.078125, + -0.82421875, + -1.625, + 0.671875, + -2.859375, + -3.140625, + -0.62109375, + 0.14453125, + -0.56640625, + -1.1953125, + 1.1875, + 3.21875, + -2.453125, + 0.90234375, + -2.8125, + -0.08544921875, + 3.390625, + -0.2412109375, + -2.421875, + -1.703125, + -0.7734375, + -0.7734375, + -1.484375, + 0.55078125, + 2.21875, + -1.3125, + -1.0859375, + 0.61328125, + 0.09716796875, + -1.15625, + 1.0, + 0.73828125, + -1.8203125, + 2.171875, + -1.6640625, + -1.265625, + 3.421875, + -1.625, + 2.65625, + -0.08740234375, + -0.40234375, + -2.25, + -0.84375, + -0.6875, + 2.0625, + -3.203125, + -1.7109375, + 1.625, + 0.58984375, + -1.71875, + -1.5234375, + -1.7578125, + -3.6875, + 0.73828125, + 0.49609375, + -0.0034942626953125, + 3.09375, + 5.875, + 0.71484375, + 0.55859375, + 0.78515625, + -1.734375, + 4.0, + 
-1.984375, + 2.0625, + -1.734375, + -0.306640625, + -1.8125, + -1.03125, + -3.390625, + 0.73046875, + 2.40625, + 2.828125, + -0.365234375, + -0.80859375, + -0.08349609375, + -0.4140625, + -0.2294921875, + -0.859375, + -1.921875, + -2.359375, + -0.56640625, + -1.3984375, + -3.03125, + -1.1796875, + 1.6484375, + 0.59375, + 1.7734375, + 1.21875, + 2.96875, + -3.171875, + 2.734375, + -0.6015625, + 1.6015625, + -0.1318359375, + 1.5234375, + -0.408203125, + 0.455078125, + -1.1171875, + -1.546875, + -0.8984375, + -0.6953125, + -0.56640625, + 0.33984375, + 2.03125, + 1.9375, + -4.625, + -1.515625, + -0.91015625, + -0.69921875, + 0.04296875, + 1.2421875, + 0.0546875, + 0.93359375, + -0.455078125, + 0.306640625, + -0.0223388671875, + -1.9375, + -1.9609375, + -2.796875, + 1.625, + -0.265625, + 0.40625, + 1.515625, + 0.296875, + 2.8125, + 1.609375, + -2.3125, + 0.94921875, + 1.796875, + -1.7890625, + -0.5546875, + 3.453125, + -3.6875, + 2.46875, + -1.7265625, + -2.046875, + 1.84375, + -2.453125, + 0.0791015625, + -2.171875, + 0.1953125, + -0.83203125, + -0.310546875, + 0.70703125, + -5.0, + 0.490234375, + 0.828125, + 3.4375, + -0.228515625, + -0.48046875, + -4.25, + 0.66796875, + -4.375, + -0.76953125, + -1.5625, + -1.125, + -2.21875, + 1.4375, + 1.8828125, + 0.006134033203125, + -3.09375, + 3.03125, + -1.2265625, + 5.5, + -0.408203125, + -3.96875, + 0.1796875, + 1.4921875, + -0.294921875, + -3.59375, + -2.03125, + 2.109375, + -1.515625, + -3.375, + 2.34375, + 1.65625, + 3.453125, + -4.53125, + 1.8203125, + -1.859375, + 1.2421875, + -3.171875, + -0.400390625, + -4.21875, + 2.515625, + 0.86328125, + 0.390625, + -1.375, + -2.65625, + -6.96875, + 2.15625, + -2.34375, + -2.25, + 0.2119140625, + -5.71875, + 1.234375, + 0.796875, + 1.5859375, + 1.03125, + 0.404296875, + -0.06396484375, + 1.4140625, + -1.5, + -0.244140625, + 1.71875, + 0.97265625, + -1.1171875, + -0.70703125, + 4.03125, + 3.84375, + -3.375, + -0.5234375, + 1.0703125, + 0.09521484375, + 0.99609375, + 3.015625, + 
-1.296875, + -0.498046875, + -1.8828125, + -2.71875, + -3.4375, + -3.390625, + -1.3828125, + -1.46875, + 1.359375, + 1.6953125, + 4.25, + -1.5625, + 1.15625, + 0.62109375, + -4.28125, + 2.359375, + -0.470703125, + -1.3515625, + 0.11669921875, + 4.90625, + 0.71484375, + -1.2734375, + -3.0625, + 0.052490234375, + 3.234375, + -0.68359375, + -1.09375, + 0.439453125, + 1.1796875, + -0.423828125, + 2.96875, + -1.78125, + 2.875, + 1.1171875, + -2.015625, + 2.578125, + 2.25, + -5.0625, + 1.1640625, + -2.6875, + -1.609375, + -1.84375, + 1.5625, + -1.2734375, + 2.203125, + 1.296875, + -0.4453125, + -0.375, + -3.234375, + -0.859375, + -2.0625, + -1.015625, + -3.625, + 3.359375, + 2.484375, + 0.400390625, + -0.326171875, + -1.9375, + 0.21875, + -5.125, + -1.7265625, + -0.265625, + -0.90234375, + -1.015625, + 4.0, + 1.0546875, + 0.294921875, + 3.46875, + -1.1953125, + -2.75, + -0.9609375, + -2.640625, + 0.169921875, + 2.5, + 0.11083984375, + -3.09375, + -2.328125, + 0.09814453125, + 0.0439453125, + -1.8984375, + -4.0625, + 0.140625, + -5.3125, + 1.234375, + 2.75, + -2.0, + -2.875, + -1.7421875, + 0.1240234375, + 2.4375, + -1.5859375, + -2.765625, + 0.77734375, + 1.0859375, + -0.2578125, + -1.7421875, + -0.443359375, + 0.1572265625, + 4.09375, + -0.267578125, + -0.88671875, + 1.78125, + -0.30859375, + 1.125, + -2.546875, + 0.142578125, + 0.1474609375, + 2.015625, + 1.9296875, + 2.34375, + -0.8125, + -0.5234375, + -2.171875, + 1.1015625, + -0.69921875, + 1.9453125, + -0.6484375, + -0.1396484375, + 1.0, + -0.140625, + -1.171875, + -0.8359375, + -2.015625, + -2.9375, + 0.2314453125, + -1.8828125, + 1.0859375, + 2.21875, + -0.353515625, + 2.0625, + 0.369140625, + 0.1220703125, + -2.765625, + -3.203125, + 1.8671875, + -0.005706787109375, + 3.6875, + -5.3125, + -0.65625, + 0.291015625, + 0.10009765625, + -2.34375, + 0.1337890625, + 0.64453125, + -3.0, + -2.0625, + -1.3046875, + 1.21875, + -0.82421875, + -2.578125, + 1.03125, + 0.3515625, + -1.0546875, + -1.328125, + 2.71875, + 
-1.1171875, + -0.365234375, + -1.28125, + -1.609375, + 0.91015625, + -2.203125, + 1.9140625, + -0.8359375, + -1.40625, + -0.578125, + 0.91015625, + 0.59375, + 0.78515625, + 1.515625, + 3.625, + 0.2490234375, + 0.8671875, + -1.1640625, + -0.94140625, + -1.0546875, + -0.84375, + -2.6875, + 0.0184326171875, + 0.88671875, + -0.1337890625, + -1.8359375, + -2.109375, + -5.125, + 2.78125, + 3.140625, + -1.5859375, + -0.50390625, + 0.765625, + 3.5, + -2.203125, + -0.953125, + -3.265625, + -1.046875, + 0.99609375, + -0.2333984375, + -3.625, + 0.1103515625, + 0.2734375, + -1.2109375, + -0.01275634765625, + -1.59375, + 0.54296875, + 1.125, + -0.7578125, + -2.921875, + 4.1875, + 1.09375, + 3.296875, + -3.328125, + 1.796875, + 0.55078125, + -1.9921875, + 1.3203125, + 0.69140625, + -0.0296630859375, + 3.703125, + 0.98828125, + -2.921875, + -1.2734375, + -0.859375, + 8.75, + -1.015625, + -0.470703125, + -1.484375, + -0.240234375, + 2.75, + -2.328125, + 3.71875, + -0.02392578125, + -1.09375, + -0.486328125, + 1.9296875, + 0.55859375, + -0.306640625, + -1.7734375, + 1.046875, + -1.84375, + 0.5390625, + 3.109375, + 1.8671875, + -2.265625, + -2.984375, + -2.40625, + -0.197265625, + -1.0625, + -1.1484375, + 0.1962890625, + -1.15625, + -0.1240234375, + -3.828125, + -0.2490234375, + 0.0703125, + 0.5625, + 0.53125, + 1.6640625, + 1.3359375, + 0.5078125, + 2.078125, + -3.21875, + -2.265625, + -0.96484375, + -0.5546875, + -0.1240234375, + 2.328125, + 0.97265625, + -2.984375, + -5.34375, + 2.234375, + -0.609375, + -5.84375, + -5.125, + -0.251953125, + -1.640625, + 2.46875, + 0.71875, + 1.4296875, + 0.828125, + 0.72265625, + -0.3046875, + 1.5625, + -0.64453125, + -0.439453125, + 0.22265625, + -0.796875, + -1.3359375, + -0.2578125, + -0.7734375, + 0.23828125, + 0.6875, + -2.53125, + 1.09375, + 0.392578125, + -0.07421875, + 0.91796875, + 3.03125, + -1.25, + -0.087890625, + 2.703125, + 1.1640625, + 2.359375, + 0.69140625, + 0.08203125, + 1.2734375, + -1.09375, + -1.171875, + -0.376953125, + 
3.5, + -3.203125, + -2.234375, + -0.94921875, + -0.494140625, + -1.75, + -0.73046875, + -0.259765625, + 2.21875, + 3.078125, + 2.28125, + 1.0234375, + 1.6875, + -0.7421875, + -1.3359375, + -1.515625, + 1.8359375, + 0.90625, + -0.181640625, + -2.09375, + -1.109375, + -3.0, + 1.96875, + 2.75, + 1.703125, + -0.0947265625, + -1.984375, + -1.390625, + 0.61328125, + 0.56640625, + -0.59375, + -0.2392578125, + -0.6875, + 0.0250244140625, + -0.93359375, + -0.59375, + 0.9140625, + 1.9375, + -4.25, + -11.6875, + 0.01806640625, + -0.34765625, + 1.7734375, + 3.578125, + 0.1630859375, + -4.4375, + 1.3125, + 1.3984375, + -1.515625, + -2.84375, + 3.265625, + 0.208984375, + 0.333984375, + -1.96875, + -7.9375, + 0.27734375, + -0.9140625, + 2.46875, + 1.390625, + 1.734375, + -1.5, + -0.326171875, + -1.46875, + 5.40625, + 0.12158203125, + -0.65234375, + 1.65625, + -2.28125, + -1.3828125, + -0.7890625, + -0.953125, + 0.90625, + -1.34375, + -1.015625, + -1.28125, + -1.0625, + 0.423828125, + -3.140625, + 1.4296875, + -1.78125, + 2.234375, + -2.90625, + -1.9609375, + -3.0, + -1.0546875, + -0.875, + 3.90625, + -2.5, + 2.25, + 1.046875, + 0.92578125, + -0.423828125, + 0.73828125, + -0.470703125, + 2.265625, + 1.515625, + 0.71875, + -0.294921875, + 1.65625, + 2.09375, + 4.5625, + 2.34375, + 0.73046875, + -0.314453125, + -1.4140625, + -0.359375, + -1.4140625, + 0.07958984375, + -0.255859375, + 1.3984375, + -0.9140625, + -1.03125, + -0.79296875, + 3.3125, + -1.0625, + -0.048828125, + 2.265625, + -3.703125, + -0.384765625, + 1.3046875, + -1.53125, + -2.8125, + -2.34375, + 0.3203125, + 0.80859375, + 1.3203125, + 2.890625, + -0.85546875, + 2.34375, + -0.96875, + 2.59375, + 0.84765625, + 0.44140625, + 0.007568359375, + 1.4296875, + 0.98828125, + 0.421875, + -1.484375, + -0.75, + -3.78125, + 0.78515625, + 1.8359375, + 0.51171875, + -1.3828125, + -0.1533203125, + -2.15625, + 0.69921875, + 2.25, + 0.54296875, + 2.921875, + -1.3671875, + -1.5, + 2.265625, + 2.140625, + 3.578125, + -2.859375, + 
-1.046875, + 3.5, + -2.28125, + 1.9296875, + 0.1962890625, + -2.859375, + -3.671875, + 0.89453125, + -5.3125, + -2.109375, + -5.65625, + -0.3515625, + 1.5859375, + 0.9921875, + -0.796875, + -0.2216796875, + 1.21875, + -2.796875, + -0.48828125, + -0.421875, + -1.25, + -1.171875, + -0.373046875, + -1.7734375, + 4.125, + -0.671875, + 0.89453125, + -2.921875, + 3.40625, + 1.8203125, + -3.78125, + -0.2255859375, + 0.5078125, + -0.34375, + 0.1318359375, + -0.6171875, + -3.875, + -0.578125, + -1.9140625, + 3.8125, + -0.69140625, + 0.84375, + 0.52734375, + 0.67578125, + 2.5, + 3.09375, + 1.3984375, + 1.75, + -1.796875, + 1.203125, + 0.455078125, + 0.50390625, + 0.609375, + 1.9765625, + -0.7265625, + 2.03125, + -1.5, + 0.216796875, + 2.703125, + 0.47265625, + -0.462890625, + -0.302734375, + 2.046875, + -0.330078125, + 3.96875, + 0.98828125, + 2.4375, + 4.65625, + -0.62109375, + 1.4140625, + 0.59375, + -1.5234375, + -0.1611328125, + -0.796875, + -3.125, + -2.1875, + 0.875, + 0.80078125, + -2.40625, + 0.48046875, + -1.3203125, + 1.484375, + 0.75390625, + -0.53515625, + -1.8984375, + -1.109375, + -2.921875, + 1.4453125, + -0.59375, + -3.21875, + 0.72265625, + 1.9765625, + -2.421875, + -0.7265625, + -1.390625, + 1.546875, + 1.1953125, + -0.427734375, + 3.28125, + 1.734375, + -2.671875, + -0.79296875, + 2.609375, + -1.671875, + 1.03125, + 3.046875, + 0.0277099609375, + -1.953125, + 0.21875, + -1.2890625, + 0.5234375, + -0.84765625, + -0.8984375, + -3.515625, + 0.98046875, + 0.8046875, + 0.55078125, + 3.09375, + 2.515625, + -4.0, + 0.373046875, + -5.0625, + 1.1796875, + -0.5703125, + -0.40625, + 1.0546875, + -0.6015625, + -0.60546875, + 2.640625, + -1.3203125, + -3.421875, + -1.109375, + 0.009521484375, + 2.703125, + -3.078125, + -0.88671875, + 1.734375, + -1.1640625, + -0.361328125, + 0.8359375, + -0.30078125, + 0.328125, + -0.427734375, + -1.9921875, + 1.4765625, + 0.671875, + -0.271484375, + -0.416015625, + 0.01385498046875, + 2.390625, + 0.0, + 0.92578125, + 1.0234375, + 
-1.71875, + -0.98828125, + 1.921875, + 2.109375, + 2.84375, + 3.984375, + 2.015625, + 0.024169921875, + -0.484375, + 1.40625, + 3.109375, + -0.80078125, + 1.875, + -0.2001953125, + -1.3984375, + 0.7578125, + 0.2255859375, + -2.640625, + -1.4296875, + 4.78125, + 0.08349609375, + -0.96484375, + -1.640625, + -0.265625, + 5.71875, + -4.84375, + -0.3046875, + 0.578125, + 2.25, + -1.5390625, + 0.4765625, + -1.265625, + 2.46875, + -1.15625, + 1.8046875, + -0.16015625, + -0.09130859375, + -0.2001953125, + -2.359375, + 1.3984375, + 1.2265625, + -0.2099609375, + -0.494140625, + 0.036865234375, + -1.078125, + -0.46484375, + -1.015625, + -1.8671875, + -3.140625, + 0.63671875, + -2.796875, + 0.69140625, + 1.625, + 1.859375, + 0.365234375, + 0.890625, + -0.953125, + -1.5078125, + 4.75, + -10.625, + 0.1533203125, + -1.078125, + 1.28125, + 0.9609375, + 0.1328125, + -3.0625, + 0.47265625, + 2.03125, + 2.703125, + -2.65625, + -1.015625, + 1.171875, + -2.71875, + 0.67578125, + 1.96875, + 1.2421875, + -0.369140625, + -1.046875, + 2.0, + -2.5625, + -0.9296875, + 0.039306640625, + -0.4453125, + 2.109375, + 0.279296875, + -0.3671875, + 2.578125, + -0.037109375, + 0.5546875, + 0.078125, + 2.578125, + -0.1796875, + -0.310546875, + -0.46484375, + -0.2431640625, + -1.296875, + -0.0224609375, + 0.87109375, + 0.024169921875, + 0.9140625, + 0.7109375, + 0.578125, + -1.4765625, + -0.1328125, + -0.84765625, + -0.115234375, + 0.2578125, + -2.4375, + 0.76953125, + -1.0234375, + 4.53125, + 2.3125, + -1.421875, + -0.578125, + -0.1259765625, + -1.7734375, + 0.73046875, + -0.1328125, + 3.59375, + 0.67578125, + -2.84375, + 0.546875, + -0.578125, + -0.2578125, + -2.171875, + 4.15625, + -0.375, + -3.53125, + -2.03125, + 2.328125, + 0.36328125, + -1.9765625, + 0.287109375, + -0.8515625, + -2.296875, + -1.3671875, + -1.015625, + -3.296875, + 0.5546875, + 3.375, + 3.578125, + -1.6953125, + -0.515625, + -0.035400390625, + 0.97265625, + -3.078125, + -0.326171875, + -1.953125, + -3.71875, + 1.875, + 
0.341796875, + 0.2060546875, + 2.765625, + -2.46875, + 1.5, + -0.2080078125, + 0.76953125, + -0.7578125, + 0.267578125, + 3.234375, + -3.4375, + -1.3671875, + 0.21484375, + -0.33984375, + -7.71875, + -2.265625, + -0.515625, + 3.015625, + -0.8359375, + 3.15625, + -1.1875, + -0.2734375, + 2.953125, + -6.5625, + 2.953125, + -1.390625, + 2.40625, + -2.015625, + -2.015625, + 2.65625, + 1.390625, + -1.1328125, + 0.5234375, + 1.2109375, + 4.03125, + -1.296875, + -1.2734375, + 0.9453125, + -0.4375, + 0.32421875, + -0.890625, + 2.546875, + 1.3671875, + -0.87109375, + -2.453125, + 2.890625, + 2.28125, + 1.1875, + 2.96875, + 1.6171875, + -0.85546875, + 0.62109375, + 1.90625, + -4.125, + 0.90234375, + -0.578125, + 1.265625, + 0.08203125, + -1.15625, + 2.09375, + -0.5, + -0.80078125, + -0.62890625, + -3.21875, + 3.5625, + -0.96875, + 1.71875, + -1.1953125, + -0.8671875, + -0.34375, + 0.04248046875, + 2.21875, + 0.322265625, + 3.515625, + 2.390625, + -1.484375, + -1.703125, + -1.4921875, + 1.21875, + 2.5, + 2.109375, + 1.9609375, + 2.1875, + 0.040283203125, + 0.63671875, + -2.359375, + 0.9296875, + 0.458984375, + -0.70703125, + -0.134765625, + 0.107421875, + 0.314453125, + 2.109375, + 2.84375, + -0.9140625, + 1.625, + 1.3359375, + 0.7421875, + 1.546875, + -0.6640625, + 3.125, + -4.59375, + -0.90234375, + 0.81640625, + 3.984375, + 3.3125, + 4.78125, + 0.057861328125, + -2.03125, + -1.5, + 2.0625, + 5.65625, + 1.3203125, + 1.84375, + 5.15625, + -1.7734375, + 0.1796875, + 2.875, + 2.78125, + 0.546875, + -1.875, + 1.2890625, + -2.703125, + -0.421875, + 0.5703125, + -3.84375, + 0.89453125, + 0.81640625, + -0.32421875, + 0.1669921875, + -0.72265625, + -0.8984375, + -0.91015625, + -1.46875, + -3.265625, + 2.421875, + 0.78515625, + -0.1328125, + -1.9921875, + 3.078125, + 3.125, + -0.0181884765625, + -1.5703125, + -1.3671875, + 0.169921875, + 2.203125, + -2.03125, + 2.953125, + -2.625, + 1.4609375, + 0.07958984375, + 3.328125, + -4.78125, + -1.1875, + -1.9609375, + 2.21875, + 5.25, + 
1.046875, + 0.94140625, + -2.203125, + -4.625, + 3.75, + -0.91015625, + 0.79296875, + -1.1875, + -0.375, + -0.1650390625, + 1.5078125, + 8.375, + 0.15625, + -0.404296875, + -1.84375, + -0.306640625, + 1.515625, + 0.263671875, + -1.453125, + -0.1953125, + 0.62890625, + 0.0220947265625, + -1.96875, + -0.0015716552734375, + -0.146484375, + 3.328125, + 1.125, + 1.3671875, + -1.3515625, + 1.28125, + -1.28125, + 2.171875, + 0.82421875, + 0.047607421875, + 0.9375, + 1.875, + -0.24609375, + 3.6875, + 1.5078125, + -2.90625, + -1.8125, + -0.16015625, + -0.58203125, + 0.7578125, + 0.400390625, + -2.03125, + 0.08837890625, + 1.5703125, + 2.609375, + -1.1875, + 2.546875, + 4.9375, + 2.3125, + 2.25, + 1.8359375, + 6.25, + -2.78125, + -3.5, + -0.98828125, + -0.04736328125, + 1.109375, + 0.71484375, + -2.375, + -0.62890625, + 1.3359375, + -3.5625, + -0.16015625, + 0.072265625, + 1.8515625, + 0.408203125, + 2.796875, + -0.23046875, + 2.671875, + 0.5703125, + 0.134765625, + -0.51171875, + 0.275390625, + 2.015625, + -0.025634765625, + 1.25, + 2.234375, + 1.703125, + -2.59375, + 0.1533203125, + -4.9375, + -2.421875, + 0.302734375, + 1.1953125, + -3.46875, + 0.0263671875, + -1.1875, + 0.5625, + -10.125, + 3.265625, + 1.1953125, + -1.421875, + 4.5625, + -0.66015625, + -0.130859375, + -0.5625, + -2.265625, + -4.9375, + -2.875, + -1.0859375, + 0.99609375, + 0.07470703125, + 0.0028533935546875, + -0.158203125, + -0.69921875, + 1.265625, + 2.234375, + -1.265625, + 0.0849609375, + 0.1328125, + 0.416015625, + 0.4375, + -3.625, + -2.890625, + -0.5546875, + -0.921875, + -0.5703125, + -4.375, + -4.78125, + -8.4375, + -0.6015625, + -0.1962890625, + 0.7265625, + -2.875, + 2.515625, + -4.34375, + 1.4921875, + -0.48046875, + 1.6171875, + -2.09375, + -1.390625, + 0.345703125, + 0.1240234375, + -1.0625, + -0.10986328125, + 0.9296875, + 2.375, + 1.109375, + -0.255859375, + 1.03125, + 3.515625, + 0.83984375, + -2.0625, + -0.0002498626708984375, + -0.361328125, + -3.453125, + 2.765625, + -0.54296875, + 
0.357421875, + -1.3125, + -2.984375, + 1.2109375, + 0.32421875, + -0.58984375, + 2.96875, + 1.1015625, + 2.171875, + 0.58203125, + -0.86328125, + -2.03125, + -0.62890625, + 3.078125, + 0.302734375, + -0.396484375, + 2.609375, + -1.1796875, + 2.03125, + 0.57421875, + 0.94921875, + 0.79296875, + -0.8359375, + 0.2119140625, + -2.90625, + 0.55859375, + -2.125, + -1.859375, + 1.96875, + 3.328125, + -0.890625, + 0.99609375, + -0.96875, + -1.1484375, + -0.1591796875, + 0.1064453125, + -1.4375, + -0.98046875, + -0.026123046875, + -3.984375, + -4.40625, + -0.75390625, + -2.65625, + -0.921875, + -0.765625, + -0.34765625, + 1.78125, + -3.3125, + -1.7109375, + 0.765625, + 0.6171875, + 1.2109375, + -1.859375, + -2.296875, + 1.3125, + 1.515625, + 0.82421875, + 0.6796875, + -0.78515625, + -1.140625, + -1.2421875, + -2.375, + -1.265625, + 0.1259765625, + 1.6953125, + -2.21875, + -0.259765625, + -0.79296875, + -0.859375, + 1.4921875, + 1.421875, + 4.09375, + -0.478515625, + 2.59375, + 3.140625, + -4.125, + -4.59375, + 1.265625, + 2.390625, + 0.2373046875, + 0.2353515625, + -0.765625, + -1.859375, + 2.984375, + 3.015625, + 1.4140625, + 5.75, + -1.53125, + -1.1328125, + -0.55859375, + -3.578125, + 2.5625, + -0.8515625, + 2.53125, + 1.2109375, + -1.828125, + 1.40625, + -0.2890625, + -0.031005859375, + -2.859375, + -3.765625, + 3.09375, + 2.046875, + 1.7109375, + 3.5, + 0.50390625, + 3.859375, + -0.0234375, + -1.0859375, + -1.265625, + -0.267578125, + 2.578125, + 1.1328125, + -0.2734375, + 1.8515625, + -2.015625, + 3.03125, + -1.2421875, + -0.6796875, + -0.6171875, + 0.0, + -3.375, + -2.8125, + -0.365234375, + -0.470703125, + 5.09375, + -2.03125, + 0.99609375, + 0.462890625, + -0.1337890625, + -2.140625, + 1.2265625, + 0.408203125, + 0.1826171875, + 1.4609375, + -0.412109375, + 1.859375, + 0.7734375, + -1.265625, + 1.7421875, + 0.08544921875, + -1.4375, + 2.65625, + -0.6796875, + 1.359375, + -3.296875, + 6.875, + 0.65625, + -0.7109375, + 0.21875, + 3.5625, + -1.9140625, + 0.49609375, + 
-3.703125, + 1.8828125, + 2.25, + 1.546875, + 1.9921875, + -1.109375, + -2.046875, + -1.8984375, + -0.60546875, + -1.921875, + -0.2236328125, + 2.78125, + 1.3203125, + 2.921875, + -0.0299072265625, + 3.3125, + 1.9453125, + -3.0, + -0.255859375, + 4.28125, + 0.59765625, + -0.703125, + 2.921875, + -0.13671875, + 0.796875, + 2.125, + 0.48046875, + -1.515625, + 0.01220703125, + -3.203125, + -1.1875, + 0.408203125, + 2.234375, + -3.125, + -0.1435546875, + -1.8515625, + 0.12890625, + -4.03125, + 1.7109375, + 2.25, + 2.65625, + -2.015625, + -2.25, + 1.515625, + -0.482421875, + 2.765625, + 0.31640625, + -1.3359375, + 1.125, + 1.140625, + -1.484375, + -0.7578125, + -2.671875, + 3.5625, + -0.56640625, + -0.150390625, + -2.28125, + -0.6484375, + -0.55859375, + -1.703125, + -1.328125, + 2.078125, + 2.75, + -1.078125, + 2.421875, + -3.71875, + 1.390625, + -0.055419921875, + -0.55859375, + -2.3125, + -2.203125, + 2.125, + 1.0390625, + -0.396484375, + -0.181640625, + 3.21875, + 0.1708984375, + 1.3984375, + -1.6015625, + 1.75, + -0.0245361328125, + -0.859375, + 5.75, + 1.9609375, + -1.875, + -3.3125, + -0.609375, + 0.69140625, + 0.265625, + 0.494140625, + -1.0390625, + -0.6875, + 1.4453125, + 1.640625, + 3.25, + -1.109375, + 3.9375, + -2.46875, + -1.0390625, + -0.9375, + 2.71875, + -1.5390625, + 1.7421875, + -3.734375, + 1.890625, + -0.150390625, + 0.515625, + -3.09375, + 3.296875, + 0.31640625, + 2.171875, + -1.5703125, + -0.181640625, + -0.77734375, + -2.34375, + 1.6875, + -1.3203125, + -2.15625, + 2.265625, + -2.140625, + 1.046875, + 2.15625, + -4.4375, + -1.8671875, + 1.703125, + -0.6796875, + -0.34765625, + -0.263671875, + 1.2265625, + 0.2080078125, + 3.0625, + -0.453125, + 0.9296875, + -2.4375, + 0.8125, + 2.609375, + -1.5625, + -1.8125, + 0.322265625, + -0.78125, + -0.6875, + -1.2734375, + -0.91015625, + -0.67578125, + -0.09423828125, + 0.71484375, + -1.5078125, + 3.03125, + -0.5390625, + 0.77734375, + 0.322265625, + -0.93359375, + 0.83203125, + 1.5859375, + 3.046875, + 
-2.71875, + -0.173828125, + -2.546875, + 3.140625, + -0.1298828125, + -1.5390625, + -1.3828125, + 5.40625, + 0.87890625, + 0.287109375, + -1.515625, + 1.328125, + -0.76171875, + 0.024169921875, + -1.9609375, + 0.74609375, + -1.0546875, + 2.40625, + -0.279296875, + -3.453125, + 1.1640625, + -0.90625, + 0.60546875, + -4.1875, + 0.05224609375, + -2.171875, + 0.78515625, + -0.2001953125, + -2.6875, + 5.65625, + 2.796875, + 1.6328125, + 2.8125, + 1.515625, + -0.431640625, + -0.6171875, + -1.5234375, + -1.6328125, + 1.6171875, + 2.5, + 3.84375, + -0.2392578125, + -0.48046875, + 2.0, + -3.4375, + -0.392578125, + -1.8828125, + -1.3984375, + 0.65234375, + 1.671875, + 2.28125, + -3.578125, + -0.58203125, + -0.98046875, + 2.625, + -2.0, + 1.2421875, + -0.59765625, + -0.11376953125, + -0.30078125, + -1.03125, + -0.0277099609375, + -3.03125, + 1.09375, + -0.8984375, + -0.6796875, + 2.359375, + 5.4375, + 1.84375, + -1.3046875, + 2.265625, + -1.6171875, + -1.6875, + 0.2890625, + 1.3359375, + -0.57421875, + -1.609375, + -0.8671875, + -0.76953125, + -0.478515625, + 1.625, + 0.9453125, + 3.65625, + 2.15625, + -1.75, + -1.453125, + 0.031494140625, + -4.375, + -3.4375, + -0.984375, + -0.828125, + 1.2109375, + 1.125, + -0.37109375, + 2.328125, + 0.84765625, + 2.046875, + 1.3828125, + 2.0625, + -1.9921875, + -3.140625, + -2.125, + -0.8828125, + 2.375, + -0.7265625, + 0.63671875, + -1.46875, + -2.078125, + -0.267578125, + -2.546875, + 0.640625, + 2.359375, + 0.49609375, + 0.11767578125, + 2.625, + 0.13671875, + -0.98828125, + -1.765625, + 1.9921875, + -0.203125, + 1.9375, + -4.09375, + 6.84375, + -2.0625, + -5.375, + 0.00113677978515625, + -0.369140625, + -0.7109375, + -4.0625, + -0.048583984375, + -0.259765625, + -1.1953125, + 5.25, + -3.796875, + 2.171875, + 0.353515625, + -1.6328125, + -1.7109375, + -2.375, + -4.09375, + 1.75, + 0.57421875, + 0.1279296875, + -1.2265625, + 2.234375, + -2.703125, + 2.796875, + 0.28125, + 0.42578125, + 1.7578125, + 1.8046875, + -0.66015625, + 1.6484375, 
+ -0.8828125, + 3.71875, + 0.6640625, + -1.4375, + -1.734375, + -3.265625, + -1.3125, + -0.8125, + -2.703125, + 1.6484375, + -2.4375, + 0.4609375, + 0.64453125, + -1.78125, + -3.0, + -1.0234375, + 0.6015625, + -0.1640625, + -1.0390625, + 1.9765625, + -0.64453125, + -0.77734375, + -0.296875, + 3.765625, + -0.4609375, + -0.08349609375, + -1.1171875, + -1.2109375, + -1.8046875, + 0.578125, + -0.89453125, + 2.015625, + 1.609375, + -0.82421875, + -1.6328125, + 2.3125, + -0.3828125, + 0.8984375, + 0.6875, + -0.51171875, + -0.96484375, + -1.6484375, + -0.51171875, + -1.4765625, + -1.03125, + -0.49609375, + 2.328125, + 1.0625, + -0.380859375, + 3.875, + 1.171875, + 0.77734375, + 0.60546875, + -1.6484375, + -1.609375, + 0.31640625, + 0.9296875, + -0.60546875, + -1.640625, + 1.3125, + -0.40234375, + 1.1328125, + 2.09375, + -0.0615234375, + 3.296875, + -0.55859375, + 1.890625, + -0.765625, + 1.5859375, + 2.671875, + 0.2890625, + -1.5625, + 3.703125, + -3.21875, + 0.33203125, + 1.1796875, + 2.640625, + -0.1767578125, + 1.5390625, + -2.640625, + 0.46484375, + 2.984375, + 3.671875, + -3.390625, + -1.78125, + -1.09375, + 1.265625, + -0.98828125, + -0.94140625, + 2.9375, + -1.4140625, + 0.031494140625, + 0.6484375, + -1.8203125, + -2.84375, + -1.4609375, + 0.396484375, + 2.546875, + 0.482421875, + -2.125, + -0.79296875, + 0.392578125, + 0.7109375, + 3.140625, + 0.0673828125, + -0.578125, + -2.390625, + -0.5390625, + 1.2890625, + 2.328125, + -0.263671875, + 1.15625, + -0.8359375, + -2.078125, + -2.453125, + 0.240234375, + 0.53125, + 1.15625, + 1.2421875, + -1.0859375, + -0.08544921875, + -0.58984375, + -0.54296875, + 0.67578125, + -1.390625, + -1.53125, + 1.890625, + 0.62890625, + -0.75390625, + -3.71875, + -0.1787109375, + -0.09521484375, + -2.71875, + -0.396484375, + -0.123046875, + -1.2890625, + -1.5234375, + 2.1875, + 0.201171875, + -0.150390625, + 1.203125, + -2.734375, + 0.62109375, + -0.7265625, + 2.046875, + -1.4375, + 0.400390625, + -0.1474609375, + 0.828125, + 3.625, + 
1.34375, + 0.890625, + -0.2578125, + -1.625, + 0.53515625, + 3.140625, + 0.6875, + -2.015625, + 0.1376953125, + 0.08984375, + 0.427734375, + -1.515625, + 1.0625, + -0.8671875, + 1.5390625, + -3.625, + 2.109375, + 1.8203125, + 3.15625, + 0.12353515625, + -2.015625, + -0.0927734375, + 0.37890625, + -0.92578125, + -2.078125, + -0.49609375, + -0.455078125, + 0.52734375, + -0.2099609375, + 0.4375, + -1.21875, + -0.72265625, + 2.171875, + 0.68359375, + -1.609375, + 0.212890625, + -6.96875, + 2.953125, + 1.5234375, + 0.8359375, + -1.140625, + -0.228515625, + -0.404296875, + -0.91015625, + 1.734375, + 0.07763671875, + 0.072265625, + 2.625, + 2.734375, + -0.53125, + -2.703125, + -4.03125, + 0.265625, + -0.51171875, + -0.51953125, + -2.90625, + 0.5859375, + -2.171875, + 0.002044677734375, + -0.59765625, + -4.09375, + -0.2177734375, + -1.890625, + 0.037109375, + -1.9921875, + 1.953125, + 0.96875, + -5.21875, + 2.40625, + 0.166015625, + 0.375, + -9.6875, + 1.3125, + -0.78125, + 0.1875, + 0.59765625, + -1.1796875, + 1.109375, + -0.82421875, + -4.4375, + 2.75, + -2.5625, + 0.71875, + -1.9453125, + -2.21875, + -1.25, + 2.96875, + -0.40234375, + -0.201171875, + 0.1748046875, + 0.57421875, + 1.1953125, + -0.484375, + 1.09375, + 0.369140625, + 1.2578125, + 2.59375, + 2.421875, + -2.9375, + -2.4375, + 1.3359375, + -3.171875, + -2.9375, + -1.53125, + -0.01031494140625, + -0.94140625, + -1.1171875, + -0.71875, + 1.640625, + -1.7265625, + 0.96875, + 0.98828125, + 1.2421875, + -1.625, + -2.21875, + -1.8046875, + 2.46875, + -1.5546875, + 3.125, + 2.34375, + 0.890625, + -1.796875, + 2.6875, + -1.0, + 1.0703125, + -1.203125, + -1.6953125, + 0.4765625, + 1.8671875, + 3.953125, + 3.140625, + 1.046875, + -4.34375, + -1.9453125, + -1.5234375, + -4.0625, + -0.07373046875, + -0.0125732421875, + -1.1171875, + -2.328125, + -3.546875, + -0.890625, + -0.8984375, + -2.421875, + -3.625, + 1.0390625, + -0.48046875, + 0.0986328125, + -3.140625, + 1.9375, + 0.65625, + 1.2890625, + -1.515625, + 3.46875, + 
0.1806640625, + 2.59375, + -2.03125, + -2.046875, + 3.484375, + -1.3828125, + 1.09375, + 0.0223388671875, + 1.21875, + -0.3203125, + 0.17578125, + -0.62109375, + 1.3671875, + -2.0, + -1.3828125, + 1.1640625, + 0.392578125, + -1.2421875, + 1.6015625, + 0.81640625, + 2.21875, + -0.6640625, + 0.416015625, + -1.90625, + -1.4609375, + -2.03125, + 2.625, + -4.25, + 2.171875, + 3.046875, + -0.95703125, + 1.5859375, + -0.63671875, + 4.78125, + -0.1220703125, + -4.78125, + 0.55078125, + 0.59765625, + 1.46875, + -0.671875, + -0.53125, + 4.09375, + 2.34375, + -1.3671875, + 1.4921875, + 5.5, + -0.443359375, + 4.15625, + 3.140625, + 3.421875, + 1.3125, + -0.71484375, + 0.62890625, + -0.087890625, + -1.3984375, + 0.08251953125, + 0.51953125, + -2.015625, + -1.2578125, + -0.296875, + -1.28125, + -0.91796875, + -1.3046875, + 0.12255859375, + -0.98046875, + 0.35546875, + -2.078125, + -0.275390625, + 0.236328125, + 0.451171875, + 0.18359375, + 3.328125, + 0.50390625, + 1.328125, + -1.4765625, + 1.15625, + -1.90625, + -2.03125, + -0.96484375, + -1.609375, + -1.015625, + -1.2109375, + -2.09375, + 2.453125, + 2.828125, + 3.453125, + 0.734375, + 0.439453125, + -0.062255859375, + -0.224609375, + -0.95703125, + 2.453125, + 1.8515625, + -1.2265625, + 2.140625, + 0.004638671875, + 2.53125, + -0.68359375, + 2.796875, + -2.078125, + -1.359375, + -5.46875, + -0.037841796875, + 0.8359375, + -0.353515625, + 1.984375, + -1.0546875, + 4.71875, + 1.59375, + 1.859375, + -1.4609375, + -0.859375, + -0.3359375, + 5.03125, + 0.94140625, + 0.9140625, + 0.2119140625, + 1.4296875, + 0.9453125, + 0.478515625, + -2.53125, + 3.140625, + 1.1875, + -3.390625, + 0.5390625, + -0.10791015625, + 0.1484375, + 0.59375, + 0.6640625, + 0.2412109375, + -0.57421875, + 0.345703125, + -1.1875, + 1.5546875, + -4.65625, + 0.85546875, + -1.765625, + -0.439453125, + 2.046875, + -5.78125, + -3.296875, + 0.56640625, + 1.8203125, + -0.04833984375, + 0.65625, + 2.78125, + -2.140625, + 0.09228515625, + -0.412109375, + -2.875, + 
1.8984375, + -0.734375, + 1.8671875, + 1.2578125, + 0.255859375, + -0.546875, + 0.44140625, + -1.46875, + -2.203125, + 0.2236328125, + -0.6796875, + 1.40625, + -0.59765625, + 1.6640625, + 1.8671875, + -0.67578125, + -3.078125, + -1.2578125, + 0.56640625, + -0.55078125, + 2.203125, + 0.08349609375, + -2.453125, + -1.015625, + -2.265625, + -2.609375, + 3.28125, + -0.91796875, + -0.2412109375, + 0.86328125, + -2.90625, + -1.4609375, + -1.796875, + -2.109375, + -2.046875, + -0.359375, + -2.421875, + -0.6953125, + -0.1015625, + -1.71875, + -1.765625, + -0.5546875, + 1.7421875, + -2.046875, + 1.6875, + -2.984375, + 0.134765625, + 0.9921875, + -0.0155029296875, + 0.384765625, + -0.10693359375, + 1.265625, + -0.859375, + 4.09375, + 2.140625, + 2.296875, + -1.890625, + 1.296875, + -3.9375, + -1.2109375, + 0.5703125, + -2.703125, + 1.3515625, + 1.8671875, + -0.09033203125, + -0.6015625, + 0.65625, + 3.0625, + 0.75, + 1.625, + -1.3984375, + -1.4140625, + -1.0, + -1.03125, + 0.09228515625, + -1.125, + 0.2158203125, + -0.84375, + -0.25390625, + -1.84375, + -0.75, + 0.2021484375, + -1.1171875, + 6.28125, + -2.359375, + -1.875, + 4.09375, + 0.408203125, + 0.416015625, + 0.45703125, + 0.498046875, + -1.0703125, + 3.328125, + 0.2421875, + 1.765625, + -1.0859375, + 1.09375, + -0.0341796875, + -1.34375, + 0.9609375, + 3.984375, + 0.5703125, + -2.203125, + -2.578125, + 1.3125, + 2.71875, + -0.498046875, + -1.703125, + 0.142578125, + -1.1875, + -1.3671875, + 3.15625, + -0.01226806640625, + 3.5, + -1.7109375, + -1.390625, + -2.953125, + -0.6484375, + 0.07080078125, + -1.6328125, + 2.484375, + -0.19921875, + -1.0, + -0.361328125, + 0.4765625, + -1.84375, + -1.9140625, + 1.0234375, + -1.3515625, + -1.9921875, + 0.40234375, + -1.8515625, + 0.46875, + 0.80859375, + 2.03125, + -0.7265625, + -0.416015625, + 0.423828125, + -0.10009765625, + 1.015625, + 3.03125, + -1.875, + -2.34375, + 1.28125, + 0.3203125, + 1.4296875, + -1.1171875, + 3.171875, + 0.0294189453125, + 4.25, + 3.28125, + 
-1.5078125, + -0.73828125, + 1.4609375, + -0.498046875, + -1.4921875, + 0.66015625, + -0.76953125, + 3.765625, + -0.12060546875, + 1.515625, + 4.5, + -1.171875, + 0.55859375, + 1.859375, + 0.0281982421875, + -3.03125, + 0.3125, + 0.53515625, + -0.37109375, + -1.765625, + -0.5703125, + 1.8671875, + -0.83984375, + -0.984375, + -0.57421875, + -0.59765625, + -0.3203125, + 0.98828125, + -3.015625, + -5.1875, + -0.828125, + -0.76171875, + 2.203125, + 0.71484375, + -3.015625, + 0.71875, + -3.609375, + 2.8125, + -0.171875, + 0.076171875, + 1.2578125, + -0.71875, + 2.96875, + -0.296875, + -0.09765625, + 0.392578125, + 2.953125, + -0.64453125, + -2.03125, + -0.3671875, + 1.90625, + -0.1298828125, + -2.25, + -0.37890625, + 1.5625, + -7.21875, + -0.98046875, + 0.031494140625, + -1.3515625, + -4.4375, + 1.8203125, + -0.98828125, + 3.046875, + -0.65625, + -0.2060546875, + 5.6875, + 3.46875, + -1.5234375, + -0.07421875, + 0.98828125, + 0.71875, + 1.1796875, + 1.90625, + -0.07275390625, + -1.0234375, + 3.171875, + 1.234375, + -0.6484375, + 2.4375, + 2.578125, + -1.078125, + 0.46484375, + 2.21875, + 1.59375, + -0.1943359375, + -1.796875, + -5.6875, + -1.2734375, + 0.66015625, + 4.75, + -1.0703125, + 0.1533203125, + -4.78125, + -3.046875, + -0.7734375, + 2.984375, + 0.443359375, + -2.75, + 0.158203125, + -1.2734375, + -3.609375, + -1.1953125, + -0.5078125, + -3.265625, + 0.267578125, + 0.76171875, + -3.890625, + 1.8828125, + 2.671875, + 0.9375, + -2.9375, + -0.8125, + -1.2265625, + -0.93359375, + 2.5625, + 2.015625, + -0.453125, + -0.91015625, + 1.984375, + -2.84375, + -1.3125, + 3.21875, + 1.5390625, + -3.0625, + -1.46875, + -1.5390625, + -1.0546875, + -0.5859375, + 0.53125, + 0.48046875, + -5.25, + 0.9765625, + 0.474609375, + -2.3125, + 3.4375, + 1.1328125, + -0.828125, + -0.3359375, + 2.21875, + 4.9375, + -0.765625, + 1.234375, + -0.921875, + -3.09375, + 1.6796875, + 1.6015625, + -0.021240234375, + 0.60546875, + -1.75, + -2.859375, + -3.21875, + -1.484375, + -1.9453125, + 
1.1953125, + 0.90234375, + 0.494140625, + 0.16796875, + 1.5859375, + 2.671875, + 1.1171875, + -2.953125, + 0.09814453125, + 2.515625, + 2.578125, + 1.9609375, + 1.234375, + -0.490234375, + -0.298828125, + 7.1875, + -0.037109375, + 0.09423828125, + 1.4765625, + -1.0625, + -0.40234375, + -0.42578125, + -0.1259765625, + -3.796875, + -0.5390625, + -6.65625, + 0.81640625, + -0.07373046875, + -1.8984375, + -1.4375, + 0.33984375, + -2.765625, + 1.328125, + -2.421875, + -0.96875, + 0.09814453125, + -0.2197265625, + -2.890625, + 2.015625, + -1.6015625, + 5.625, + 1.5, + 3.125, + -3.46875, + 5.46875, + -0.30859375, + -3.421875, + -4.625, + 1.484375, + 0.1787109375, + 1.0, + 1.3828125, + -0.84375, + -0.408203125, + -1.359375, + 0.453125, + -0.0380859375, + 0.5703125, + -1.0234375, + -1.734375, + 2.328125, + -0.98046875, + -0.66796875, + 0.94140625, + 1.3046875, + 0.33984375, + -2.046875, + -0.1474609375, + -0.60546875, + 2.125, + -0.265625, + 1.328125, + -4.1875, + 2.46875, + 1.46875, + 1.7109375, + -1.4375, + -2.6875, + 0.162109375, + 0.267578125, + 1.640625, + 0.6640625, + 0.11181640625, + -3.375, + -4.15625, + 0.024169921875, + 0.5078125, + 0.2490234375, + 0.2314453125, + -1.4375, + -3.453125, + 0.462890625, + 0.1904296875, + -0.7109375, + -0.17578125, + -4.75, + 1.453125, + 2.5, + -0.490234375, + -10.8125, + 0.68359375, + -1.90625, + -2.21875, + -1.28125, + 0.9296875, + 2.3125, + 1.515625, + -4.71875, + 2.53125, + 2.0, + 0.5390625, + 1.0390625, + 4.46875, + -0.11669921875, + -2.890625, + -0.0279541015625, + 0.357421875, + 3.15625, + 3.578125, + 5.25, + 2.515625, + -0.310546875, + 1.96875, + -3.296875, + 0.498046875, + -1.9140625, + -0.263671875, + 0.5625, + 0.384765625, + -0.94921875, + -0.76171875, + -3.546875, + -0.2109375, + -3.5625, + 0.64453125, + -2.4375, + 2.1875, + 0.546875, + 1.8828125, + -2.015625, + -4.15625, + -1.1953125, + -1.1796875, + 3.984375, + 0.73046875, + -2.328125, + 1.5234375, + -0.48828125, + -5.5, + 0.90625, + -0.0908203125, + 0.94921875, + 
1.3671875, + 2.34375, + -1.4140625, + -2.265625, + -0.30078125, + 1.390625, + 1.2109375, + 1.1328125, + 3.46875, + -0.66015625, + -1.1328125, + -0.86328125, + 2.15625, + 5.0, + 0.00836181640625, + 0.396484375, + 0.79296875, + -2.296875, + 1.1328125, + -1.421875, + -1.84375, + 0.326171875, + 1.6484375, + -1.890625, + 1.078125, + -2.28125, + 3.390625, + -5.625, + -2.234375, + -0.134765625, + 0.52734375, + -1.859375, + -1.578125, + 2.75, + 0.53125, + -1.1484375, + 0.5390625, + 0.78125, + -0.1259765625, + -2.859375, + -2.6875, + 6.4375, + -3.453125, + -2.78125, + -0.875, + -0.9375, + -0.72265625, + 2.609375, + 1.125, + -3.203125, + -0.06591796875, + -2.84375, + -0.171875, + 1.34375, + 1.3359375, + 4.46875, + -0.62109375, + -2.234375, + 1.234375, + -0.46875, + -0.2158203125, + -3.3125, + -2.546875, + -0.93359375, + -2.671875, + -0.427734375, + 4.5, + 0.53125, + 0.62109375, + 0.63671875, + -1.953125, + 1.2421875, + -0.28125, + 0.103515625, + -2.09375, + -4.0, + 0.1962890625, + -2.75, + -0.859375, + -2.546875, + -1.15625, + 1.078125, + -3.671875, + 2.421875, + -0.07763671875, + -1.8671875, + 0.55859375, + 0.1435546875, + 0.5703125, + 3.28125, + -3.21875, + -0.62890625, + 0.57421875, + -0.1650390625, + -0.46875, + -0.44140625, + 0.546875, + 0.7109375, + -1.6953125, + -1.359375, + 1.5390625, + -1.8203125, + -1.6875, + 1.734375, + 1.0078125, + 0.640625, + -1.78125, + 0.171875, + -0.54296875, + -0.376953125, + 2.625, + 4.34375, + -4.21875, + -2.140625, + -1.3359375, + 1.5703125, + -4.78125, + 0.08251953125, + 0.890625, + 1.1328125, + 0.671875, + -0.25, + -3.59375, + -2.84375, + -1.8984375, + -1.3203125, + -0.328125, + 3.375, + 5.53125, + -1.2109375, + 2.921875, + 1.625, + -0.1416015625, + 0.07568359375, + -1.0078125, + -1.3984375, + -1.375, + 3.796875, + 1.390625, + -1.5703125, + 6.875, + -2.046875, + -1.8828125, + 0.5859375, + 0.4765625, + -2.078125, + 0.96484375, + -0.208984375, + 1.34375, + -0.1083984375, + -0.97265625, + -0.494140625, + 1.9921875, + 3.640625, + 
0.2294921875, + -0.333984375, + 1.5546875, + -1.03125, + 2.625, + -0.578125, + 0.98828125, + 3.296875, + -1.4453125, + -1.15625, + -0.478515625, + 2.71875, + -1.515625, + -0.408203125, + -2.75, + 1.8125, + 0.30078125, + 2.140625, + 0.388671875, + 1.3671875, + 0.7734375, + -3.5625, + -1.390625, + -1.4296875, + -0.5546875, + -0.87109375, + 0.734375, + 1.0390625, + -3.296875, + -2.65625, + -0.56640625, + 1.4609375, + -0.345703125, + 1.6875, + 0.66796875, + 2.4375, + 0.7421875, + 1.796875, + 1.25, + -0.78515625, + -0.84375, + -0.61328125, + -0.95703125, + -0.33984375, + -1.0703125, + -2.09375, + 0.07470703125, + 0.0791015625, + -1.5546875, + 2.515625, + -0.9765625, + 3.4375, + -2.203125, + 0.68359375, + 0.392578125, + -0.53125, + 0.984375, + 3.125, + -2.21875, + 3.03125, + -0.8125, + 2.40625, + 1.96875, + 0.412109375, + 1.671875, + 2.234375, + 1.4921875, + 0.0303955078125, + 3.359375, + -1.0, + -2.921875, + 2.28125, + 3.453125, + 0.1376953125, + -1.6875, + -0.6640625, + 0.98828125, + -1.296875, + -0.7421875, + -0.2734375, + -1.2421875, + 0.259765625, + 2.125, + -0.98828125, + 1.890625, + 0.26171875, + 1.7734375, + 1.625, + -0.70703125, + -3.1875, + -1.84375, + -2.734375, + -0.6171875, + 0.458984375, + 2.03125, + 1.609375, + -0.6015625, + -1.96875, + 3.765625, + -1.7578125, + -1.15625, + -0.7421875, + 13.0625, + 2.625, + -1.109375, + -3.703125, + 1.265625, + -1.6796875, + 1.21875, + -2.09375, + -4.53125, + 0.74609375, + -1.890625, + -1.921875, + 0.734375, + -3.609375, + 1.546875, + -1.46875, + -0.6875, + 3.96875, + 2.359375, + 0.283203125, + -2.625, + 2.234375, + -2.40625, + 1.1796875, + 0.6796875, + 0.9921875, + -0.61328125, + -1.546875, + -3.390625, + -0.431640625, + -1.2421875, + 0.361328125, + 0.337890625, + 0.9375, + -2.5625, + -0.43359375, + -1.2109375, + -1.5703125, + 0.46484375, + -2.28125, + 1.0234375, + -0.859375, + 2.203125, + 1.375, + -0.033203125, + 0.1181640625, + -0.84765625, + -5.75, + 1.8828125, + -0.65625, + 0.1748046875, + -0.55078125, + 2.4375, + 
0.384765625, + 2.53125, + -2.046875, + -0.85546875, + 2.28125, + 2.40625, + -4.25, + -0.400390625, + -2.875, + -2.828125, + 1.6953125, + -0.185546875, + -0.9375, + 2.21875, + 0.06787109375, + 1.125, + 2.21875, + -0.61328125, + -1.921875, + 1.9609375, + 4.375, + -2.921875, + -1.2265625, + -2.65625, + 1.6953125, + 2.65625, + 1.921875, + 0.0791015625, + 2.265625, + 0.71484375, + -0.84765625, + 1.9453125, + -2.140625, + -2.875, + -0.66015625, + -0.40234375, + 2.09375, + 1.3359375, + 1.171875, + -0.008544921875, + 1.9296875, + -0.265625, + -0.96484375, + -0.1474609375, + -2.265625, + -1.4296875, + 2.46875, + 1.921875, + 1.796875, + 0.60546875, + 2.15625, + 0.52734375, + -0.1201171875, + -1.4609375, + -2.71875, + -1.484375, + -0.302734375, + 0.43359375, + -0.7578125, + 4.40625, + 1.1328125, + 0.625, + -1.09375, + 1.0859375, + 2.015625, + 2.171875, + -0.27734375, + -0.212890625, + -0.1484375, + 0.1328125, + 0.8984375, + -0.68359375, + -0.267578125, + -2.375, + -0.058837890625, + 0.34375, + 0.67578125, + -6.96875, + 1.9609375, + -1.2265625, + 3.140625, + 0.89453125, + -0.0250244140625, + 2.375, + -0.458984375, + 2.0625, + 0.2353515625, + 1.65625, + 3.28125, + 0.51171875, + 5.5, + 0.83984375, + -0.453125, + -1.7734375, + 0.19140625, + -0.98828125, + -2.515625, + 0.515625, + -1.109375, + -0.484375, + 1.9921875, + -1.0859375, + 3.1875, + -1.1171875, + 0.7265625, + 0.609375, + 1.1328125, + -0.427734375, + -1.421875, + 3.203125, + -0.40234375, + 2.265625, + 0.4609375, + -1.0078125, + -3.90625, + 0.03564453125, + 1.609375, + 0.71484375, + -0.9140625, + -2.28125, + -2.265625, + 1.6875, + 1.859375, + -4.46875, + -1.859375, + -0.86328125, + 0.6328125, + -0.423828125, + -1.234375, + -1.40625, + 0.427734375, + -0.75, + -3.015625, + 4.375, + 1.734375, + 1.125, + 1.4609375, + -3.390625, + 0.73828125, + 1.4609375, + -1.0703125, + 2.046875, + 3.515625, + -8.4375, + 0.82421875, + -2.0625, + 0.890625, + -1.546875, + -1.875, + 4.9375, + -7.1875, + -0.8046875, + -2.765625, + 2.0625, + 
-1.03125, + 1.0546875, + -0.81640625, + 4.0, + -6.21875, + 2.296875, + -1.140625, + 0.162109375, + 0.376953125, + 0.330078125, + 0.63671875, + 0.17578125, + 1.171875, + 1.1640625, + 6.90625, + -2.140625, + -1.3828125, + -3.34375, + -2.734375, + 0.0830078125, + -0.0206298828125, + -2.625, + -1.9921875, + -1.1171875, + -0.734375, + -0.072265625, + -0.271484375, + -0.314453125, + 1.453125, + 0.73046875, + 1.4375, + 1.3203125, + -1.3515625, + -0.283203125, + -0.2041015625, + 2.125, + -0.380859375, + -0.65625, + -2.25, + -0.05078125, + 2.234375, + -1.4453125, + 2.3125, + -3.515625, + -0.1943359375, + -0.248046875, + 3.0625, + 0.185546875, + -1.40625, + 2.578125, + 2.09375, + -0.6015625, + 2.84375, + -0.1826171875, + 0.3125, + -0.265625, + -0.06396484375, + 1.609375, + 0.427734375, + -1.421875, + 1.3125, + -0.76953125, + 2.46875, + 0.69140625, + 3.140625, + 2.28125, + 0.9609375, + 0.05419921875, + 3.0625, + -1.2734375, + -0.625, + 1.0234375, + 0.53125, + -0.58203125, + -1.6015625, + -2.671875, + 1.8203125, + -3.3125, + -0.5234375, + 3.09375, + -1.6015625, + 0.9375, + 2.65625, + -0.71875, + 1.0625, + -3.6875, + -0.546875, + 0.41796875, + 0.78515625, + 3.0, + 1.3203125, + 0.53515625, + 1.2890625, + -0.0712890625, + -2.984375, + -0.478515625, + 0.376953125, + -0.9375, + -7.59375, + 0.90625, + -0.138671875, + -3.015625, + 1.7578125, + -6.34375, + -0.09423828125, + 0.28125, + -1.671875, + -1.5625, + -1.3125, + -1.1171875, + -0.26953125, + 0.201171875, + 4.96875, + 1.9765625, + -0.48046875, + -0.76171875, + 0.390625, + -1.34375, + 0.3203125, + -1.5625, + -1.25, + 0.037841796875, + 2.1875, + 0.40234375, + -3.5625, + 3.21875, + -0.10791015625, + 1.46875, + 1.875, + -0.703125, + -2.75, + -0.63671875, + -2.1875, + -4.59375, + -2.703125, + -5.25, + 1.6171875, + 1.6328125, + 1.3359375, + -0.7265625, + 2.09375, + 1.765625, + -0.08447265625, + -0.12109375, + 0.796875, + -0.490234375, + 0.78515625, + -0.38671875, + -3.09375, + -3.640625, + 2.390625, + 0.453125, + -0.439453125, + 
-1.171875, + 0.77734375, + -0.349609375, + -2.09375, + 0.82421875, + -4.3125, + 0.41015625, + 3.390625, + 1.4453125, + -4.0625, + 3.578125, + -2.921875, + -1.8984375, + 0.75, + 0.421875, + -0.66796875, + -6.03125, + 3.8125, + -2.6875, + 1.265625, + -1.140625, + 3.828125, + -0.09130859375, + 0.0400390625, + -0.37890625, + -2.34375, + -0.87890625, + -0.482421875, + 1.765625, + -1.6015625, + 3.421875, + -0.10693359375, + 0.33203125, + 2.109375, + 1.203125, + -0.203125, + 0.263671875, + 1.015625, + 3.46875, + 0.78515625, + 1.796875, + -1.5546875, + -0.154296875, + -4.53125, + -0.240234375, + 1.78125, + -1.5234375, + 2.078125, + 1.71875, + -0.859375, + 2.234375, + -1.0, + 0.8984375, + -0.1396484375, + -0.78125, + 1.0703125, + 3.75, + -0.69140625, + -19.75, + 0.91015625, + -1.375, + -0.62109375, + 1.609375, + -0.828125, + 2.671875, + 0.34765625, + 2.375, + -0.025390625, + -0.470703125, + -1.2890625, + -0.80078125, + -1.390625, + 0.53125, + 1.7734375, + -1.921875, + 0.15234375, + -4.65625, + -2.734375, + 2.875, + 0.025146484375, + -0.9375, + 1.4609375, + -1.3125, + -3.578125, + -0.3828125, + 0.024169921875, + 0.51171875, + 0.298828125, + -0.474609375, + 2.4375, + 1.125, + 6.09375, + 2.109375, + 0.5234375, + 2.765625, + 0.51953125, + 0.84375, + 0.171875, + 2.75, + 0.53515625, + -1.921875, + 0.412109375, + -2.609375, + 0.48828125, + 2.421875, + -0.890625, + 0.291015625, + -1.4765625, + -3.640625, + -1.3515625, + 1.84375, + -0.8828125, + -2.765625, + 0.16015625, + -2.59375, + -3.828125, + -0.01190185546875, + 0.6796875, + 1.8828125, + -1.5625, + 1.65625, + 0.140625, + 0.271484375, + -0.291015625, + -0.078125, + 0.365234375, + -0.353515625, + 0.546875, + -1.4609375, + 1.6015625, + 2.21875, + 2.46875, + 3.4375, + 2.140625, + 0.05419921875, + 1.2578125, + 1.6328125, + -0.8671875, + -0.62890625, + -0.08349609375, + -3.625, + 2.34375, + -2.796875, + -1.0625, + -1.8125, + 1.8984375, + -0.384765625, + -1.6875, + -0.0030517578125, + -1.5, + 0.83984375, + -0.73828125, + 0.96484375, + 
-0.625, + 3.296875, + 1.703125, + -2.578125, + -0.51171875, + 1.546875, + 2.5625, + 0.78515625, + 2.9375, + 0.12353515625, + -1.0625, + -0.03466796875, + -0.5390625, + -1.1953125, + -0.84765625, + 0.58203125, + -1.078125, + 3.046875, + -0.59375, + -0.1025390625, + -1.7890625, + -3.125, + -2.015625, + 1.7109375, + 2.109375, + -0.484375, + -0.4765625, + -0.005615234375, + -0.328125, + -1.5546875, + 1.0859375, + -0.369140625, + -1.546875, + 0.84765625, + 0.27734375, + 0.62890625, + -1.9375, + -0.11669921875, + -0.59765625, + -2.109375, + -0.84375, + 0.74609375, + 0.95703125, + 0.21875, + 0.37109375, + -0.7109375, + -0.08544921875, + 1.9921875, + -1.484375, + 0.66015625, + 0.07421875, + -1.0546875, + -0.345703125, + 1.6171875, + -0.85546875, + -3.015625, + -1.2734375, + 2.828125, + 1.2265625, + 1.5390625, + 1.109375, + -1.1484375, + -1.046875, + 3.515625, + 1.5859375, + 1.0859375, + -1.015625, + -4.03125, + -1.09375, + 1.75, + -2.359375, + -3.46875, + 1.546875, + -0.1591796875, + -0.376953125, + 3.765625, + 1.59375, + 3.625, + -2.515625, + 1.890625, + -0.80078125, + 0.53125, + 0.9296875, + 2.890625, + -0.26171875, + -0.515625, + 2.171875, + 0.75, + -0.408203125, + 1.4140625, + -1.3359375, + 0.29296875, + -1.3203125, + 4.1875, + -0.80859375, + 0.30859375, + 0.74609375, + -2.71875, + 2.546875, + -0.79296875, + -0.98828125, + 2.0625, + 2.59375, + -3.25, + 0.6796875, + 0.365234375, + 0.53125, + -1.84375, + -2.671875, + -1.4296875, + 1.15625, + -3.09375, + 0.74609375 + ], + "index": 3, + "object": "embedding", + "raw_output": null + } + ], + "model": "accounts/fireworks/models/qwen3-embedding-8b", + "object": "list", + "usage": { + "prompt_tokens": 164, + "total_tokens": 164, + "completion_tokens": 0 + }, + "perf_metrics": null + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/dd6cc3f2e6ce.json b/tests/integration/recordings/responses/67bec1334dc9.json similarity index 94% rename from 
tests/integration/recordings/responses/dd6cc3f2e6ce.json rename to tests/integration/recordings/responses/67bec1334dc9.json index cfb752700..ab4df3065 100644 --- a/tests/integration/recordings/responses/dd6cc3f2e6ce.json +++ b/tests/integration/recordings/responses/67bec1334dc9.json @@ -15,7 +15,7 @@ "content": "What is the boiling point of the liquid polyjuice in celsius?" } ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "tool_choice": { @@ -60,7 +60,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-726", + "id": "chatcmpl-560", "choices": [ { "delta": { @@ -71,7 +71,7 @@ "tool_calls": [ { "index": 0, - "id": "call_26xsv4bs", + "id": "call_h50zu2cg", "function": { "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", "name": "get_boiling_point" @@ -85,7 +85,7 @@ "logprobs": null } ], - "created": 1759368387, + "created": 1759427022, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -96,7 +96,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-726", + "id": "chatcmpl-560", "choices": [ { "delta": { @@ -111,7 +111,7 @@ "logprobs": null } ], - "created": 1759368387, + "created": 1759427022, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/7d28e973eff5.json b/tests/integration/recordings/responses/67f94c4f8ba0.json similarity index 91% rename from tests/integration/recordings/responses/7d28e973eff5.json rename to tests/integration/recordings/responses/67f94c4f8ba0.json index 29d30de2e..cd8ad4f35 100644 --- a/tests/integration/recordings/responses/7d28e973eff5.json +++ b/tests/integration/recordings/responses/67f94c4f8ba0.json @@ -15,7 +15,7 @@ "content": "What is the boiling point of the liquid polyjuice in celsius?" 
} ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "top_p": 0.9 @@ -28,7 +28,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -43,7 +43,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -54,7 +54,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -69,7 +69,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -80,7 +80,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -95,7 +95,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -106,7 +106,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -121,7 +121,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -132,7 +132,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -147,7 +147,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -158,7 +158,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -173,7 +173,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -184,7 +184,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -199,7 +199,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -210,7 +210,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -225,7 +225,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -236,7 +236,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -251,7 +251,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -262,7 +262,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -277,7 +277,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -288,7 +288,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { 
"delta": { @@ -303,7 +303,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -314,7 +314,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -329,7 +329,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -340,7 +340,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -355,7 +355,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -366,7 +366,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -381,7 +381,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -392,7 +392,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -407,7 +407,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -418,7 +418,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -433,7 +433,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": 
"llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -444,7 +444,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -459,7 +459,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -470,7 +470,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -485,7 +485,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -496,7 +496,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -511,7 +511,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -522,7 +522,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -537,7 +537,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -548,7 +548,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -563,7 +563,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -574,7 +574,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -589,7 +589,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -600,7 +600,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -615,7 +615,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -626,7 +626,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -641,7 +641,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -652,7 +652,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -667,7 +667,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -678,7 +678,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -693,7 +693,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -704,7 +704,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { 
"delta": { @@ -719,7 +719,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -730,7 +730,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -745,7 +745,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -756,7 +756,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -771,7 +771,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -782,7 +782,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -797,7 +797,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -808,7 +808,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -823,7 +823,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -834,7 +834,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -849,7 +849,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": 
"llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -860,7 +860,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -875,7 +875,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427020, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -886,7 +886,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -901,7 +901,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -912,7 +912,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -927,7 +927,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -938,7 +938,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -953,7 +953,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -964,7 +964,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -979,7 +979,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -990,7 +990,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1005,7 +1005,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1016,7 +1016,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1031,7 +1031,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1042,7 +1042,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1057,7 +1057,7 @@ "logprobs": null } ], - "created": 1759368385, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1068,7 +1068,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1083,7 +1083,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1094,7 +1094,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1109,7 +1109,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1120,7 +1120,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": 
"chatcmpl-932", "choices": [ { "delta": { @@ -1135,7 +1135,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1146,7 +1146,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1161,7 +1161,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1172,7 +1172,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1187,7 +1187,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1198,7 +1198,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1213,7 +1213,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1224,7 +1224,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1239,7 +1239,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1250,7 +1250,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1265,7 +1265,7 @@ "logprobs": null } ], - "created": 1759368386, + 
"created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1276,7 +1276,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1291,7 +1291,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1302,7 +1302,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1317,7 +1317,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1328,7 +1328,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1343,7 +1343,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1354,7 +1354,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1369,7 +1369,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1380,7 +1380,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1395,7 +1395,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, 
@@ -1406,7 +1406,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1421,7 +1421,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1432,7 +1432,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1447,7 +1447,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1458,7 +1458,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1473,7 +1473,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1484,7 +1484,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-197", + "id": "chatcmpl-932", "choices": [ { "delta": { @@ -1499,7 +1499,7 @@ "logprobs": null } ], - "created": 1759368386, + "created": 1759427021, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/f55d47f584e9.json b/tests/integration/recordings/responses/8b531e81126a.json similarity index 94% rename from tests/integration/recordings/responses/f55d47f584e9.json rename to tests/integration/recordings/responses/8b531e81126a.json index 66c8c0103..a72fde06c 100644 --- a/tests/integration/recordings/responses/f55d47f584e9.json +++ b/tests/integration/recordings/responses/8b531e81126a.json @@ -15,7 +15,7 @@ "content": "Call 
get_boiling_point tool and answer What is the boiling point of polyjuice?" } ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "tool_choice": "auto", @@ -55,7 +55,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-159", + "id": "chatcmpl-101", "choices": [ { "delta": { @@ -66,7 +66,7 @@ "tool_calls": [ { "index": 0, - "id": "call_9c0j8toc", + "id": "call_8rf1aax7", "function": { "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}", "name": "get_boiling_point" @@ -80,7 +80,7 @@ "logprobs": null } ], - "created": 1759368388, + "created": 1759427029, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -91,7 +91,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-159", + "id": "chatcmpl-101", "choices": [ { "delta": { @@ -106,7 +106,7 @@ "logprobs": null } ], - "created": 1759368388, + "created": 1759427029, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/aeb1abed5560.json b/tests/integration/recordings/responses/aeb1abed5560.json new file mode 100644 index 000000000..1b32994fe --- /dev/null +++ b/tests/integration/recordings/responses/aeb1abed5560.json @@ -0,0 +1,4137 @@ +{ + "request": { + "method": "POST", + "url": "https://api.fireworks.ai/inference/v1/v1/embeddings", + "headers": {}, + "body": { + "model": "accounts/fireworks/models/qwen3-embedding-8b", + "input": [ + "This is a test file 1" + ], + "encoding_format": "float" + }, + "endpoint": "/v1/embeddings", + "model": "accounts/fireworks/models/qwen3-embedding-8b" + }, + "response": { + "body": { + "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse", + "__data__": { + "data": [ + { + "embedding": [ + 2.140625, + 0.038330078125, + -1.875, + -2.1875, + 1.765625, + 0.08203125, + 
0.60546875, + 3.828125, + -3.96875, + 0.58203125, + -3.546875, + 0.52734375, + 8.25, + -1.9296875, + 3.296875, + -1.7890625, + 2.765625, + 1.2734375, + 1.3046875, + 1.5, + -2.734375, + 4.9375, + 2.71875, + -0.83984375, + 4.125, + 1.5625, + -2.171875, + -2.0625, + -0.90234375, + 0.427734375, + -1.9140625, + 2.625, + 1.6640625, + 6.125, + -3.375, + 2.390625, + -2.65625, + -1.375, + -0.7890625, + 0.88671875, + -0.66015625, + -0.9296875, + 0.26953125, + 0.93359375, + -3.015625, + 0.88671875, + -1.40625, + -1.921875, + 0.1083984375, + -0.0927734375, + 0.283203125, + -1.65625, + -0.5625, + 1.21875, + 0.21484375, + 2.515625, + -0.984375, + -1.40625, + -4.21875, + -1.4453125, + -5.3125, + -1.3515625, + 2.265625, + -1.2109375, + 0.48828125, + 2.625, + 1.171875, + 1.1328125, + 1.25, + -2.21875, + 0.42578125, + 2.515625, + 0.83984375, + 0.71875, + 1.015625, + -1.3671875, + -3.6875, + -0.7421875, + 0.341796875, + 4.71875, + -0.09521484375, + -4.59375, + 1.1328125, + -2.21875, + -0.9375, + -1.5078125, + -0.8046875, + -1.171875, + 0.91015625, + -0.55078125, + 3.34375, + -0.734375, + -0.384765625, + 1.6796875, + 0.1015625, + 1.875, + -4.875, + -2.0, + -2.203125, + 3.15625, + 2.09375, + 2.421875, + 1.203125, + -0.52734375, + -0.1259765625, + 1.2734375, + -0.7109375, + -0.703125, + -0.482421875, + -0.7734375, + 2.90625, + 1.96875, + 2.1875, + 1.9921875, + -0.4296875, + -0.78515625, + 0.2294921875, + 2.046875, + 1.0078125, + 2.015625, + 2.4375, + -1.3828125, + -4.875, + -0.63671875, + 3.953125, + -2.09375, + 0.8515625, + -0.421875, + -0.103515625, + -2.953125, + 2.859375, + -3.890625, + 2.8125, + -3.453125, + -1.390625, + -0.63671875, + -3.59375, + 1.1484375, + 0.78125, + -2.578125, + -1.734375, + 1.90625, + 1.84375, + -0.1552734375, + 1.109375, + 3.796875, + -2.765625, + 0.60546875, + 0.380859375, + 5.34375, + 0.09619140625, + -0.77734375, + 1.65625, + -2.84375, + -1.4921875, + -1.5, + -2.78125, + 2.390625, + 2.8125, + 0.326171875, + -1.8046875, + -1.3125, + 4.59375, + 1.3125, + 
-0.62109375, + 0.072265625, + 2.0625, + 0.498046875, + 1.46875, + -2.09375, + 2.140625, + 0.126953125, + -2.5625, + 3.234375, + 0.703125, + 1.6953125, + -0.09619140625, + -0.9765625, + 2.453125, + 3.234375, + 1.34375, + 2.25, + -1.796875, + 2.34375, + 1.25, + 1.5859375, + 0.318359375, + -2.40625, + 0.58984375, + 1.1875, + 1.1171875, + -0.10205078125, + -0.0927734375, + 1.0859375, + -3.25, + 0.2890625, + 0.51171875, + -0.2353515625, + 1.15625, + -1.140625, + 7.34375, + -3.515625, + 2.1875, + -0.92578125, + -2.1875, + -1.578125, + -2.515625, + 0.79296875, + 1.3515625, + -5.09375, + 0.50390625, + -2.109375, + 2.6875, + 2.0, + 1.15625, + -2.265625, + 5.75, + -0.62109375, + -1.0546875, + 1.21875, + 0.234375, + 1.375, + 1.4609375, + 1.125, + 0.0159912109375, + -5.46875, + -0.361328125, + -1.546875, + -0.87890625, + -4.34375, + 0.94140625, + -0.89453125, + -0.0634765625, + -1.5546875, + 3.8125, + -2.609375, + -1.28125, + -0.8984375, + 0.212890625, + -1.265625, + -0.07958984375, + -0.1328125, + 1.015625, + 0.4375, + 0.515625, + 1.859375, + -1.4609375, + -0.9140625, + 4.28125, + -1.140625, + -0.9375, + -2.875, + -0.9375, + -0.5078125, + -0.390625, + -1.921875, + -0.74609375, + -1.046875, + -0.056640625, + 2.21875, + 1.4453125, + -2.71875, + 1.65625, + 3.609375, + -2.25, + -0.75, + -2.296875, + -3.703125, + -0.6796875, + -0.345703125, + 0.2255859375, + -3.03125, + -0.40625, + 0.86328125, + 0.7421875, + -0.375, + 2.6875, + -3.015625, + -2.5625, + -0.20703125, + 0.1611328125, + -1.2890625, + 2.390625, + -3.171875, + 1.828125, + -1.5078125, + 0.333984375, + 1.21875, + -1.734375, + 3.703125, + -0.1982421875, + 5.0625, + -0.32421875, + 1.6953125, + -0.74609375, + 1.84375, + -4.875, + -2.34375, + -1.515625, + -1.265625, + -0.7265625, + -0.40625, + -1.2890625, + -0.07861328125, + -0.5, + 0.85546875, + -2.921875, + 2.28125, + 0.61328125, + -1.296875, + -2.40625, + -1.65625, + 0.2021484375, + -2.0, + -1.921875, + 1.125, + -0.00787353515625, + -3.8125, + -1.390625, + 2.75, + 
-0.98828125, + -0.055908203125, + -1.2890625, + -0.60546875, + -0.0927734375, + 0.80078125, + 0.55859375, + -0.73046875, + -3.640625, + -0.9140625, + 1.96875, + 1.3046875, + -3.046875, + 1.953125, + -1.34375, + -0.10205078125, + 2.109375, + -2.53125, + 4.0625, + 0.1845703125, + -0.1611328125, + -1.8359375, + -0.0240478515625, + 0.427734375, + 0.291015625, + 0.431640625, + 3.984375, + -2.125, + -0.69921875, + -0.0400390625, + -0.51953125, + 2.15625, + 0.3671875, + -0.75390625, + 1.1484375, + 0.494140625, + -2.15625, + 2.796875, + 2.3125, + 3.734375, + -0.4921875, + 1.6796875, + 3.1875, + -2.375, + -2.078125, + 4.1875, + 1.765625, + -1.296875, + -3.703125, + -0.259765625, + -1.3671875, + 0.6328125, + -0.97265625, + 0.12353515625, + -1.8984375, + -1.2265625, + -0.77734375, + -1.125, + 1.921875, + -2.734375, + 0.10791015625, + -11.0, + 1.2265625, + -2.546875, + -1.0625, + 1.7265625, + -2.640625, + -0.19140625, + 0.388671875, + -0.1689453125, + -3.8125, + -0.73046875, + -0.55859375, + -0.921875, + -0.33203125, + 0.6875, + 0.18359375, + -2.015625, + -2.796875, + 1.015625, + -0.74609375, + -1.390625, + 0.92578125, + 1.71875, + -0.2138671875, + -1.109375, + 0.8671875, + -0.34375, + -2.015625, + 0.39453125, + -2.140625, + -0.177734375, + -0.23828125, + -0.7890625, + 2.859375, + 1.890625, + 3.65625, + 2.8125, + -0.0400390625, + -0.2197265625, + -0.09228515625, + 2.53125, + -1.75, + -1.6171875, + -3.15625, + 1.4765625, + 2.140625, + 1.234375, + 4.28125, + -1.9453125, + -0.08984375, + -0.828125, + -1.796875, + 0.72265625, + 0.2392578125, + -2.3125, + 2.265625, + 2.046875, + 1.1171875, + -1.734375, + 3.296875, + 3.625, + 0.8984375, + 3.296875, + 1.5859375, + 2.25, + -2.5, + 1.5234375, + 0.423828125, + 0.28125, + -5.0625, + 1.6875, + -0.75, + 1.5078125, + -1.7734375, + -1.359375, + -1.9765625, + -2.40625, + 0.859375, + 1.3125, + -1.2734375, + -1.953125, + 1.2734375, + -0.10302734375, + 2.671875, + -0.34375, + 0.62109375, + -1.4375, + -5.53125, + 1.3828125, + -0.035888671875, + 
2.15625, + -3.21875, + -2.75, + -0.427734375, + 5.3125, + -0.9296875, + -1.6328125, + -3.25, + -3.4375, + 0.345703125, + -5.8125, + -1.7734375, + -13.75, + 2.5625, + 6.1875, + 1.78125, + 0.0, + 1.5546875, + -0.234375, + -2.578125, + -0.373046875, + 3.9375, + 0.0177001953125, + 2.578125, + -2.40625, + 2.265625, + -0.56640625, + 1.5546875, + 2.5625, + 0.7734375, + -1.9765625, + -1.3515625, + -0.89453125, + -1.6171875, + 1.71875, + 4.625, + 0.98828125, + -0.76953125, + -1.515625, + 2.234375, + -3.5625, + -0.609375, + 0.80859375, + 0.21484375, + -2.203125, + 0.984375, + 0.138671875, + 0.61328125, + 1.8125, + 0.1630859375, + -0.46484375, + -4.4375, + -0.27734375, + 0.7421875, + 2.0, + -0.7421875, + -0.01129150390625, + 2.828125, + -1.6796875, + -1.59375, + -0.357421875, + 2.875, + -1.0859375, + -1.15625, + -1.0859375, + 0.42578125, + -1.0703125, + 2.890625, + -1.296875, + -1.40625, + 2.640625, + -2.34375, + -1.375, + -0.1865234375, + 2.734375, + 1.0234375, + 0.326171875, + -1.1875, + 4.375, + 2.078125, + 3.328125, + 6.375, + -0.53515625, + 2.15625, + 0.50390625, + -2.984375, + -0.482421875, + -1.5390625, + 1.0703125, + 0.31640625, + 2.65625, + -1.5, + 0.271484375, + -3.453125, + -4.5, + -1.734375, + 0.486328125, + -0.10205078125, + 2.953125, + 0.69140625, + -0.09033203125, + -1.4609375, + 1.5, + 0.306640625, + 1.65625, + 0.41015625, + 2.21875, + -0.41796875, + -1.6171875, + 2.671875, + -0.2490234375, + -0.33984375, + -1.0546875, + 0.3515625, + -2.421875, + 2.53125, + -3.640625, + 2.359375, + -1.1328125, + -0.376953125, + 0.2001953125, + 0.65234375, + -3.53125, + -2.5, + 1.859375, + -2.5, + -6.5625, + 0.3046875, + 1.4140625, + 0.74609375, + -2.6875, + 1.828125, + 1.6015625, + 1.0, + 2.015625, + 0.169921875, + -2.9375, + 2.28125, + -5.21875, + -2.453125, + 2.234375, + -2.0, + -0.302734375, + -3.421875, + -0.25390625, + -1.25, + 0.09716796875, + 0.462890625, + 5.53125, + -3.28125, + -2.28125, + 1.2265625, + 0.42578125, + -2.640625, + 2.109375, + 0.275390625, + 2.078125, + 
-2.171875, + -0.1162109375, + -0.8125, + 1.359375, + 3.625, + -1.0625, + -0.77734375, + 1.6484375, + 1.7265625, + 1.1484375, + -1.8046875, + 2.765625, + 2.671875, + 1.375, + -0.046142578125, + 1.40625, + 1.6875, + 2.515625, + 2.46875, + 1.75, + -2.515625, + 3.421875, + 0.10546875, + -1.0390625, + 2.6875, + 0.10986328125, + 0.99609375, + -1.5859375, + 0.98828125, + -1.390625, + -0.94140625, + -0.34375, + -1.1328125, + -0.267578125, + 0.6484375, + -3.8125, + -0.42578125, + -0.1865234375, + 0.2470703125, + 0.53515625, + -1.1640625, + -5.46875, + -1.96875, + 2.390625, + -0.1201171875, + 2.21875, + 4.34375, + -2.078125, + 3.5625, + 1.3125, + 0.6953125, + -2.125, + -1.5078125, + -2.28125, + -6.03125, + -0.5703125, + 1.09375, + -4.03125, + -2.734375, + 2.53125, + 1.2890625, + 1.3671875, + 0.64453125, + -1.078125, + -0.8046875, + -1.609375, + 1.1640625, + -0.1181640625, + 0.51953125, + -0.640625, + -1.546875, + 2.4375, + 1.34375, + 1.3984375, + 0.328125, + -2.515625, + 0.8828125, + -2.1875, + -3.390625, + 2.921875, + -3.40625, + -1.671875, + -3.203125, + 0.7265625, + -2.078125, + 0.1875, + 0.408203125, + -1.9453125, + 1.7265625, + 0.515625, + -1.2578125, + -3.109375, + 5.1875, + -0.7734375, + -2.265625, + 1.1796875, + -1.0625, + -5.25, + 4.6875, + 3.15625, + 2.6875, + -1.5625, + -4.15625, + 1.6640625, + -1.7421875, + 2.09375, + -3.609375, + 1.8515625, + 1.40625, + 0.5859375, + -0.03955078125, + -0.296875, + -1.1875, + 0.22265625, + -0.74609375, + 1.0703125, + -1.7578125, + 0.41796875, + -1.2734375, + -0.396484375, + 2.421875, + 0.453125, + 2.046875, + -0.65234375, + -0.640625, + -0.76171875, + 0.83984375, + 2.140625, + -1.578125, + -2.046875, + 3.984375, + 1.3671875, + -2.96875, + 0.146484375, + -0.47265625, + -0.00408935546875, + 1.609375, + -0.265625, + 2.953125, + 1.0546875, + 3.09375, + -2.921875, + 0.21484375, + -0.341796875, + 1.7109375, + 1.7578125, + -0.5390625, + -1.828125, + -1.171875, + 1.453125, + 0.359375, + -0.326171875, + 1.6953125, + -0.2431640625, + 
2.5625, + 2.234375, + -1.984375, + -1.125, + -2.984375, + 0.41015625, + 0.60546875, + 3.421875, + -2.328125, + 0.75, + 2.953125, + 1.890625, + 0.703125, + 0.0203857421875, + 2.4375, + -0.408203125, + -0.96875, + 2.5625, + -2.8125, + 1.7890625, + -0.6953125, + -1.1953125, + 0.9296875, + -0.1982421875, + -1.7890625, + 0.1123046875, + -0.007080078125, + -0.10009765625, + -0.81640625, + -0.50390625, + 0.55859375, + 2.046875, + 1.875, + 0.7890625, + -0.73046875, + -1.453125, + -0.65234375, + -1.6015625, + 0.455078125, + 1.9375, + 3.96875, + 2.796875, + -1.71875, + 0.6796875, + 0.75, + 5.03125, + 1.1875, + -0.74609375, + 0.37890625, + -0.326171875, + -1.7890625, + 0.7734375, + -0.97265625, + 0.052978515625, + 0.83203125, + -2.40625, + -0.39453125, + 0.388671875, + -0.54296875, + -2.359375, + -2.359375, + -2.5625, + 0.68359375, + 1.4921875, + -1.7578125, + -1.1796875, + -0.1728515625, + 2.171875, + 1.8359375, + -3.359375, + 0.73046875, + -0.1064453125, + -2.8125, + 0.515625, + 2.140625, + -0.625, + -0.73828125, + 3.578125, + 1.109375, + 0.1259765625, + 1.6875, + -1.46875, + 1.5390625, + -0.875, + 1.65625, + -1.2421875, + -2.828125, + -4.0625, + -1.2265625, + 1.3671875, + -1.484375, + -1.296875, + 0.42578125, + -0.9453125, + -3.390625, + 0.84765625, + 0.5703125, + -3.5625, + -1.53125, + 0.006988525390625, + -1.421875, + -1.421875, + 2.078125, + 2.3125, + -0.5390625, + 2.515625, + 1.671875, + -0.0634765625, + 2.1875, + -1.875, + -1.1640625, + 0.26953125, + -1.515625, + -0.341796875, + -0.4921875, + 0.66796875, + 0.99609375, + 0.0242919921875, + 0.10107421875, + 1.15625, + -0.65234375, + -0.4296875, + 1.1953125, + 2.703125, + -0.8671875, + -3.234375, + -5.1875, + 0.56640625, + 2.40625, + -2.3125, + -0.734375, + -1.328125, + -0.75, + 1.3828125, + 1.625, + -0.140625, + 3.09375, + -0.8046875, + 1.453125, + 1.6640625, + 2.84375, + 0.625, + 2.234375, + 0.91015625, + -0.1796875, + 2.21875, + 0.93359375, + -1.828125, + -6.1875, + -0.671875, + 0.296875, + -0.05224609375, + -1.84375, 
+ 0.2041015625, + 1.90625, + -1.0703125, + 3.046875, + -0.376953125, + -0.9453125, + -3.078125, + 4.375, + 0.83203125, + 0.71875, + -1.421875, + -1.4765625, + -3.6875, + 0.66796875, + 0.67578125, + 0.90234375, + -3.515625, + -1.890625, + 3.328125, + 2.09375, + -1.140625, + 2.140625, + -0.5234375, + -0.625, + -0.9765625, + -5.5625, + -0.8984375, + 1.5859375, + -2.90625, + 3.265625, + -4.5625, + -1.296875, + -0.92578125, + -3.140625, + -1.3046875, + 0.3203125, + -0.70703125, + -2.765625, + -0.9375, + -0.419921875, + -0.7265625, + 5.375, + -2.71875, + -0.2451171875, + -0.84765625, + -1.3984375, + -0.54296875, + 1.234375, + 1.9375, + 0.9921875, + 3.4375, + 0.08056640625, + -2.28125, + -0.90234375, + 8.25, + 2.3125, + -2.421875, + -0.51171875, + 1.0546875, + 5.34375, + -2.015625, + 0.546875, + -2.6875, + 2.1875, + 3.671875, + 1.3046875, + 2.953125, + -0.796875, + -0.9609375, + 1.1953125, + -1.171875, + -1.390625, + -0.5390625, + 0.490234375, + -3.671875, + 0.12451171875, + 3.125, + 0.8671875, + 2.40625, + -1.015625, + -2.90625, + -1.3984375, + 1.46875, + -2.125, + 2.171875, + -1.46875, + -1.5625, + -0.58984375, + -1.5234375, + 1.2265625, + -0.09326171875, + 0.8046875, + -1.53125, + 3.34375, + -1.6484375, + -3.15625, + 3.296875, + -3.265625, + -1.375, + 3.28125, + 2.1875, + -1.9609375, + -1.46875, + 2.578125, + 0.45703125, + -2.921875, + 0.7734375, + 0.26953125, + -2.65625, + -0.10302734375, + 2.125, + 0.042236328125, + 1.21875, + -0.03564453125, + 0.0810546875, + 2.203125, + 2.109375, + -1.8984375, + 1.078125, + 0.035400390625, + -0.70703125, + -4.125, + 3.8125, + 0.271484375, + -2.265625, + 1.265625, + -1.1484375, + 3.5625, + -3.21875, + -3.203125, + -0.98828125, + -0.306640625, + -6.59375, + -1.78125, + 1.625, + 2.34375, + -1.6953125, + -1.421875, + -2.875, + -1.8984375, + -0.890625, + -2.4375, + 2.5625, + -3.609375, + -2.9375, + -0.5703125, + -1.421875, + 0.1962890625, + 2.46875, + 1.9453125, + 5.65625, + 1.421875, + -0.037353515625, + 1.078125, + 1.4921875, + 
0.12255859375, + -2.859375, + -2.484375, + 1.8203125, + -0.3828125, + 2.171875, + 1.6171875, + -1.1875, + 3.328125, + -0.77734375, + 2.15625, + -1.6875, + 0.34375, + -1.78125, + -2.890625, + 1.15625, + 1.5625, + 0.2451171875, + 2.078125, + -1.7421875, + 0.11376953125, + 1.703125, + 0.3359375, + -0.98828125, + 2.96875, + 2.515625, + 3.46875, + -1.6015625, + 0.328125, + 0.515625, + -4.84375, + 1.1640625, + -1.5625, + -0.78125, + 0.09326171875, + -2.046875, + 1.09375, + 0.73046875, + -0.57421875, + -3.640625, + -0.04443359375, + 1.9765625, + -4.65625, + 4.59375, + 6.3125, + 0.01531982421875, + 3.140625, + 3.6875, + -1.5859375, + 2.625, + 0.66796875, + 1.1328125, + 0.78125, + -0.466796875, + -5.28125, + -0.08642578125, + -2.390625, + -0.96875, + 1.2890625, + -1.4921875, + 0.75, + -0.2421875, + 1.609375, + -0.3515625, + -1.3671875, + 0.1162109375, + 1.2734375, + -2.453125, + 0.0771484375, + -0.5703125, + -0.97265625, + 0.55078125, + 2.0, + 0.5703125, + -0.478515625, + 1.4453125, + -1.46875, + 2.25, + 0.77734375, + -2.46875, + 1.4609375, + -1.0703125, + -2.890625, + 1.2109375, + 2.015625, + -0.06494140625, + -1.109375, + -0.79296875, + -2.859375, + 4.65625, + -0.103515625, + 0.1572265625, + -1.9921875, + 0.84765625, + -1.1953125, + -2.3125, + -0.181640625, + -2.15625, + 2.90625, + -0.8125, + -2.046875, + 1.5859375, + 1.5390625, + 0.470703125, + 0.322265625, + 3.03125, + 2.796875, + -2.359375, + 1.75, + 0.78515625, + -0.435546875, + -0.83203125, + -1.28125, + -0.1435546875, + 1.234375, + -1.1328125, + 0.14453125, + -2.453125, + 2.125, + -2.5, + -2.9375, + -1.0859375, + 0.283203125, + -1.71875, + -5.53125, + -1.2890625, + -1.9921875, + -0.71484375, + 1.640625, + 1.34375, + 1.0390625, + -2.109375, + -5.75, + 0.78515625, + 0.47265625, + -4.84375, + 0.279296875, + 5.84375, + 0.365234375, + 1.2578125, + -1.515625, + -4.71875, + 0.1171875, + 1.6015625, + 3.671875, + 0.9296875, + 1.140625, + 1.859375, + -1.703125, + 0.01708984375, + 1.0859375, + -3.90625, + 0.212890625, + 
2.703125, + -3.1875, + -3.296875, + 0.71875, + -2.734375, + -2.609375, + -7.53125, + -5.0, + 1.0859375, + -1.3984375, + 0.765625, + -4.25, + 0.349609375, + -2.796875, + 3.15625, + 2.828125, + -2.484375, + 0.91796875, + -1.984375, + -0.408203125, + -0.0205078125, + 0.203125, + 3.28125, + -1.828125, + -1.578125, + -0.7421875, + -2.109375, + 3.375, + 0.9609375, + -0.984375, + -0.546875, + -0.25390625, + 1.7265625, + 1.0625, + 1.796875, + -1.96875, + -1.3046875, + 1.2890625, + -1.7421875, + -0.80859375, + 0.734375, + -0.703125, + -1.9453125, + 4.875, + -1.1875, + -0.09765625, + 2.109375, + 0.8046875, + -1.046875, + 2.90625, + -0.423828125, + 4.8125, + -2.625, + -0.6953125, + 1.3515625, + -0.2265625, + 0.99609375, + 2.390625, + 1.09375, + -2.921875, + 3.453125, + -0.5546875, + 1.5, + -1.125, + 0.953125, + -1.7109375, + -0.259765625, + -1.234375, + 3.734375, + 0.9453125, + -0.8046875, + -1.5546875, + -2.453125, + -0.76171875, + 2.71875, + 1.234375, + 1.6875, + -0.828125, + 1.34375, + 1.1953125, + 1.609375, + -3.8125, + -0.625, + 0.90234375, + 1.078125, + 2.046875, + -0.294921875, + -0.41796875, + 8.0625, + -3.875, + 2.78125, + -0.365234375, + -2.15625, + -0.62109375, + 1.703125, + -0.55078125, + 3.359375, + 2.078125, + 2.8125, + -0.95703125, + 0.79296875, + -3.328125, + 4.28125, + 3.09375, + 1.6640625, + -2.65625, + -0.80859375, + -5.75, + 0.341796875, + -2.09375, + -3.640625, + 0.27734375, + 1.375, + 2.953125, + 1.5390625, + -1.2421875, + -0.5234375, + -0.462890625, + 1.484375, + 2.015625, + 0.294921875, + 2.09375, + -0.59765625, + -1.4296875, + 2.390625, + -1.8046875, + 1.5390625, + 1.9921875, + -0.61328125, + 1.265625, + -2.09375, + 0.2236328125, + -1.359375, + -1.3828125, + 1.5859375, + 1.59375, + 3.28125, + -0.1318359375, + 1.6953125, + 1.34375, + -1.28125, + -1.53125, + -0.06689453125, + -2.421875, + 1.9453125, + -2.765625, + 1.734375, + 4.1875, + -1.0234375, + -0.91796875, + 0.7578125, + -0.049560546875, + -2.40625, + 1.640625, + -2.546875, + 1.984375, + 
-0.447265625, + 0.5, + -0.265625, + 1.1015625, + 1.390625, + -0.07958984375, + -0.91015625, + 3.3125, + 0.734375, + 0.310546875, + -3.6875, + -2.296875, + 0.474609375, + 0.34765625, + 0.169921875, + 1.2578125, + -1.96875, + -1.046875, + -3.578125, + 1.96875, + -0.9375, + -1.390625, + 2.265625, + 0.890625, + -0.0634765625, + 2.125, + -1.7890625, + 2.453125, + -3.25, + 0.74609375, + 0.1474609375, + -0.796875, + -1.2109375, + 0.6328125, + -0.490234375, + -0.07861328125, + 0.8046875, + 1.09375, + 1.5703125, + -3.71875, + -1.546875, + -1.890625, + -0.921875, + 3.765625, + 9.375, + 0.29296875, + -2.40625, + 1.796875, + 0.9453125, + 0.330078125, + -2.15625, + -5.46875, + -1.0, + -3.828125, + -1.4296875, + -2.296875, + 0.64453125, + -0.296875, + -0.6640625, + 0.09375, + -1.1015625, + 0.54296875, + -1.359375, + 0.47265625, + -1.1171875, + 0.30859375, + -0.138671875, + 0.408203125, + -0.88671875, + -1.8046875, + -0.443359375, + 1.6484375, + 0.59375, + 0.89453125, + -1.8984375, + 1.8828125, + -0.35546875, + -0.7734375, + -2.84375, + -1.7734375, + -2.03125, + -1.046875, + 8.3125, + 2.359375, + 1.8203125, + 0.0, + 0.78515625, + 2.875, + -2.546875, + 1.8515625, + 1.2578125, + -1.4375, + 0.90625, + 1.2421875, + -1.6015625, + 0.94140625, + -3.3125, + -1.9296875, + 2.109375, + 0.578125, + 2.078125, + 0.921875, + 1.046875, + 1.0859375, + 0.189453125, + 4.59375, + 1.96875, + 0.40625, + -1.15625, + 2.71875, + 1.140625, + -0.279296875, + 2.6875, + 1.2109375, + 0.423828125, + 0.9296875, + -2.125, + 1.7421875, + 1.0546875, + 1.3515625, + 1.28125, + 0.486328125, + 0.41015625, + 0.5234375, + -4.5625, + 0.369140625, + 2.21875, + 0.51171875, + 3.375, + 2.078125, + 1.2421875, + -2.328125, + 0.42578125, + -0.796875, + 1.6328125, + 1.484375, + 0.8125, + 1.65625, + -3.015625, + -1.8046875, + 1.171875, + 3.546875, + 2.421875, + -3.0, + -2.40625, + 1.484375, + 1.5, + 1.5859375, + 0.2080078125, + -0.51953125, + -2.875, + 1.125, + -0.6640625, + 1.1640625, + 0.248046875, + 0.9375, + 2.625, + 
0.83984375, + -0.78125, + -1.515625, + -1.5546875, + 1.9609375, + 0.07666015625, + -1.046875, + 1.203125, + 1.375, + -3.375, + -0.8203125, + 1.8828125, + -0.29296875, + -1.734375, + -2.796875, + 0.263671875, + -2.0625, + -0.9375, + 1.828125, + 3.171875, + -0.318359375, + -1.28125, + 0.119140625, + 3.03125, + -1.578125, + -1.1796875, + -0.59765625, + -0.27734375, + 0.7265625, + 1.4765625, + -1.921875, + -2.8125, + 1.84375, + 0.83203125, + -0.00799560546875, + -1.0078125, + 0.369140625, + -1.484375, + 3.078125, + 2.046875, + -3.953125, + -1.5234375, + -3.015625, + -0.416015625, + 0.419921875, + -0.9140625, + -0.09765625, + 0.71484375, + 0.71875, + -3.265625, + 1.6484375, + 0.220703125, + 1.359375, + -0.2109375, + 0.7890625, + -2.421875, + -3.828125, + 1.9765625, + -0.2470703125, + 3.5, + 0.36328125, + -2.28125, + 0.6328125, + 2.75, + -1.1640625, + -1.796875, + -1.8515625, + -0.8125, + 0.14453125, + -0.392578125, + 1.7421875, + -0.59375, + 1.546875, + -1.671875, + 0.1875, + 0.10986328125, + -2.015625, + -0.259765625, + 3.046875, + 2.53125, + -0.345703125, + 0.51171875, + 4.28125, + 1.5546875, + 1.390625, + -0.263671875, + 2.515625, + 1.015625, + -2.53125, + -3.578125, + 2.515625, + -0.267578125, + 0.302734375, + -1.765625, + 0.5, + 1.8046875, + 0.294921875, + -2.640625, + 0.71875, + 3.5625, + 0.32421875, + 2.203125, + -1.5390625, + -0.32421875, + 0.9609375, + -1.0703125, + 1.2578125, + -0.7734375, + -2.15625, + 0.04248046875, + -3.46875, + -0.294921875, + -0.88671875, + 0.01177978515625, + -2.34375, + -2.03125, + -1.0703125, + 0.5546875, + 0.1318359375, + -0.59375, + 1.0703125, + -2.90625, + 1.0625, + 0.94140625, + -2.984375, + 1.1953125, + -0.2021484375, + 1.5390625, + -1.3203125, + 0.63671875, + -1.359375, + 0.455078125, + 0.90625, + 1.96875, + 0.79296875, + 1.109375, + -3.46875, + 2.046875, + -1.5, + -0.38671875, + -0.3359375, + -0.5234375, + -1.109375, + 0.76953125, + -3.609375, + -3.234375, + -2.421875, + 0.12255859375, + -2.578125, + 0.94921875, + 1.1328125, + 
0.828125, + -0.68359375, + -0.828125, + 2.25, + 2.453125, + 0.49609375, + -1.3125, + 0.4765625, + 1.7109375, + -3.625, + 7.34375, + 1.6171875, + -1.6796875, + 1.859375, + 1.1328125, + 2.015625, + -1.5, + -3.25, + 5.65625, + 1.046875, + 0.376953125, + -0.25390625, + 2.390625, + 3.265625, + -0.2431640625, + 0.1943359375, + -0.1416015625, + -3.5, + -0.251953125, + -3.15625, + -1.125, + -1.375, + -0.0693359375, + -1.9453125, + 1.9921875, + 2.265625, + 0.71875, + -0.97265625, + 1.0703125, + 0.193359375, + -0.482421875, + -0.333984375, + -0.75390625, + 2.640625, + -2.59375, + -1.71875, + -1.53125, + -0.00104522705078125, + 0.6328125, + -3.875, + -0.97265625, + 0.197265625, + -2.4375, + -4.875, + 0.68359375, + -3.96875, + 0.271484375, + -0.90625, + 2.59375, + -0.5, + 6.6875, + 1.2109375, + 0.08251953125, + 2.890625, + -1.8984375, + 2.984375, + -0.9921875, + -2.421875, + 0.359375, + 0.5859375, + 2.359375, + -0.5859375, + 0.04052734375, + -4.34375, + -1.40625, + 1.5390625, + -6.53125, + -0.9921875, + 0.21484375, + 1.859375, + 3.96875, + -4.28125, + -0.447265625, + 2.140625, + 0.0079345703125, + 0.146484375, + -0.5234375, + -3.6875, + -4.09375, + 4.34375, + 3.34375, + -3.984375, + -3.453125, + -1.0078125, + 2.90625, + -2.265625, + 0.37890625, + -2.421875, + -0.1884765625, + 0.251953125, + -1.90625, + -2.1875, + 1.2265625, + -1.3984375, + -1.15625, + -2.359375, + -2.3125, + -0.08056640625, + 1.0703125, + 0.98828125, + 3.125, + -1.0859375, + -0.99609375, + -1.96875, + -1.9453125, + 1.03125, + 1.5078125, + -0.9453125, + -2.734375, + 1.453125, + -5.9375, + -2.125, + -0.44921875, + 0.96875, + -3.109375, + 2.125, + 3.421875, + 0.94921875, + 2.609375, + -1.5546875, + -0.73046875, + -0.00021839141845703125, + -1.8984375, + 3.671875, + -0.146484375, + 1.2109375, + 2.3125, + -1.6015625, + 0.455078125, + 1.7109375, + -0.8125, + 1.2421875, + -0.7890625, + 1.5625, + 1.328125, + 3.28125, + 2.84375, + 0.1787109375, + 0.44921875, + 0.1796875, + 3.0, + -3.078125, + -2.03125, + 1.7421875, + 
-1.0703125, + 0.1181640625, + -1.1015625, + 0.79296875, + -1.8046875, + 0.0791015625, + -0.275390625, + 0.8984375, + 1.21875, + -1.25, + -0.34375, + -1.1875, + 2.765625, + 1.0859375, + -1.90625, + 0.96875, + -2.921875, + -1.4609375, + -2.265625, + 1.3046875, + -0.404296875, + -1.0703125, + 3.375, + 0.53515625, + 0.173828125, + 0.35546875, + -2.203125, + 1.9765625, + 17.0, + -2.765625, + 1.6953125, + 1.8671875, + 0.08154296875, + -0.41796875, + 1.734375, + -4.78125, + -2.34375, + 0.240234375, + 2.171875, + 2.15625, + 1.296875, + 2.25, + -1.609375, + -2.3125, + -2.09375, + 0.515625, + 1.0546875, + -1.3203125, + 1.609375, + 0.98828125, + -0.298828125, + -2.40625, + 0.57421875, + -1.484375, + -2.078125, + -1.5078125, + 2.265625, + -1.0078125, + 3.6875, + -6.625, + -3.921875, + 2.5, + 1.0703125, + 3.453125, + -0.034423828125, + 3.09375, + 0.63671875, + 5.21875, + -3.140625, + -1.3671875, + 1.8515625, + 0.359375, + -6.59375, + 1.453125, + -0.5078125, + -1.484375, + -0.0322265625, + -1.5, + 0.54296875, + -0.703125, + 1.859375, + 2.5625, + -3.265625, + 0.060791015625, + 3.453125, + 4.09375, + 0.984375, + -0.4609375, + -0.396484375, + -0.52734375, + -0.8203125, + 0.486328125, + 0.80078125, + 3.796875, + -0.75390625, + -1.1953125, + 1.765625, + 2.296875, + -1.8671875, + -0.765625, + 0.333984375, + 0.42578125, + -0.33203125, + -2.515625, + -1.078125, + -0.212890625, + -1.0703125, + -0.353515625, + 0.5703125, + 0.447265625, + 0.08642578125, + 0.373046875, + 0.85546875, + 0.92578125, + 2.5, + 0.361328125, + 0.82421875, + 0.4375, + 0.66015625, + 0.73828125, + -1.4140625, + -0.97265625, + 1.890625, + -1.296875, + -0.62890625, + -0.2275390625, + -1.5390625, + -1.2421875, + -7.8125, + 2.09375, + -0.72265625, + 2.234375, + 0.50390625, + 1.890625, + -0.64453125, + -4.125, + 0.82421875, + -0.337890625, + -0.40234375, + 1.265625, + -0.75390625, + 0.2265625, + 0.62890625, + -1.2890625, + -0.67578125, + -0.353515625, + 3.484375, + -1.2578125, + 1.40625, + 3.0625, + -1.4296875, + 2.625, + 
0.1875, + -1.4453125, + -0.77734375, + -1.4296875, + -0.220703125, + 1.515625, + 0.84765625, + -2.9375, + 2.953125, + -1.703125, + 0.890625, + 1.15625, + 2.5625, + 2.984375, + 2.578125, + -0.053955078125, + -2.171875, + 2.1875, + -4.09375, + 2.84375, + 1.171875, + 1.8671875, + -0.95703125, + 1.0859375, + 1.921875, + -1.1796875, + -0.90234375, + 1.46875, + 1.421875, + 2.1875, + -1.796875, + 1.671875, + -1.0, + -2.546875, + 1.109375, + 0.453125, + -0.310546875, + -0.291015625, + 0.96484375, + -0.546875, + 1.6875, + -1.359375, + -0.453125, + -1.234375, + 2.546875, + 1.546875, + -3.0625, + 3.078125, + -1.625, + 1.0078125, + -1.7890625, + 0.337890625, + -0.1962890625, + -0.345703125, + 0.9296875, + -0.69140625, + 1.7109375, + -1.8046875, + 1.359375, + 2.640625, + -0.1279296875, + 0.455078125, + 0.365234375, + -2.578125, + 3.09375, + 1.3515625, + -0.216796875, + -1.5546875, + -1.0390625, + 0.5546875, + -1.5390625, + -0.28515625, + 0.7890625, + 0.6484375, + 0.61328125, + 0.88671875, + -1.078125, + 1.3671875, + -1.3515625, + 2.921875, + 1.4140625, + -2.703125, + 0.60546875, + 2.109375, + -3.6875, + 1.84375, + -1.1875, + -2.46875, + -0.205078125, + 3.453125, + 0.9375, + 1.640625, + 1.4453125, + -1.390625, + -1.1171875, + -1.796875, + -1.0234375, + -0.244140625, + 0.53125, + 1.96875, + 0.1396484375, + 0.91796875, + 0.73046875, + -0.7890625, + -2.84375, + -1.5078125, + 3.53125, + -0.296875, + -0.11181640625, + -0.87109375, + -1.8984375, + -2.84375, + 1.0234375, + 4.25, + -1.1015625, + 0.84765625, + -2.328125, + 0.65625, + 1.5, + -1.4140625, + 0.58203125, + 0.75, + 1.9921875, + -0.49609375, + -0.5234375, + -1.25, + -0.6640625, + 0.045654296875, + -0.58984375, + -0.65625, + -2.8125, + 2.890625, + -2.71875, + -0.34765625, + -1.7109375, + 0.765625, + 5.03125, + 0.1767578125, + 1.5546875, + -1.125, + -2.25, + 0.5625, + 1.3203125, + 0.6796875, + -4.1875, + 5.3125, + -1.3671875, + 0.796875, + -0.1279296875, + 4.0, + 0.115234375, + 5.65625, + 0.78515625, + -0.369140625, + 1.1875, + 
4.40625, + -0.0186767578125, + -0.62890625, + -2.125, + 0.38671875, + 0.828125, + -1.9453125, + 0.93359375, + 0.25, + 1.4296875, + -0.66796875, + -0.423828125, + -0.7578125, + -0.62890625, + 1.40625, + 0.66015625, + -0.162109375, + 3.890625, + -4.1875, + -0.0986328125, + -0.609375, + 0.150390625, + -1.4140625, + -0.7109375, + -2.125, + -0.9609375, + -2.25, + 1.5703125, + 0.123046875, + 0.1142578125, + -1.59375, + -1.9296875, + -0.609375, + -2.3125, + -13.5, + 1.0546875, + 1.3046875, + -0.1171875, + 0.1845703125, + 0.12353515625, + 0.60546875, + 1.4453125, + 1.484375, + 0.052734375, + 0.95703125, + 1.484375, + 1.875, + -1.7578125, + 1.375, + -1.859375, + -0.171875, + -0.77734375, + 0.828125, + 3.84375, + -1.671875, + 0.96875, + -1.453125, + 0.5234375, + -1.3828125, + -1.1328125, + 0.002227783203125, + 1.625, + -3.40625, + 0.0198974609375, + -0.65234375, + 0.9140625, + -0.5859375, + 1.6640625, + -4.375, + -1.15625, + -0.10302734375, + -1.0546875, + -1.921875, + 1.2265625, + -2.453125, + -2.140625, + 1.2421875, + -1.6328125, + 1.75, + 0.287109375, + -3.875, + 0.158203125, + 0.384765625, + -0.189453125, + 0.43359375, + 1.1328125, + 3.4375, + 0.06982421875, + -0.66796875, + 1.9921875, + 2.296875, + 1.3984375, + -0.859375, + -1.3125, + 0.045166015625, + 4.03125, + -1.546875, + -0.17578125, + -1.671875, + -2.296875, + -0.1494140625, + 2.109375, + 0.4375, + 2.1875, + -4.5, + -0.130859375, + 2.765625, + -4.5625, + 0.001800537109375, + 0.033203125, + -2.546875, + -3.390625, + -1.6328125, + 2.53125, + 1.484375, + -1.3203125, + -8.5625, + -0.291015625, + -0.064453125, + -1.7578125, + -2.15625, + 1.703125, + -1.0546875, + -2.0, + -0.443359375, + 2.296875, + -0.058837890625, + -1.5078125, + -2.78125, + -1.1328125, + 2.84375, + 1.8828125, + 1.2890625, + 0.28125, + -0.287109375, + -2.09375, + -3.03125, + 0.51171875, + 1.4140625, + -1.75, + -0.375, + -0.236328125, + -2.703125, + 2.03125, + -0.06103515625, + -1.4921875, + -0.41015625, + 1.6015625, + -0.462890625, + -0.6484375, + 
-1.0390625, + 1.25, + 3.5, + 3.328125, + -0.99609375, + -1.4453125, + 1.2578125, + -1.6484375, + 0.81640625, + 2.34375, + -0.07275390625, + -0.1337890625, + -1.265625, + -1.078125, + -0.12158203125, + -0.06787109375, + -1.109375, + 5.21875, + 0.30859375, + -0.44140625, + 2.875, + -0.91796875, + 1.8125, + -1.5078125, + 2.015625, + -1.1640625, + -1.0390625, + -1.765625, + -0.72265625, + 2.1875, + 1.1953125, + 2.296875, + 0.98046875, + -2.5, + 0.470703125, + 1.1015625, + -0.796875, + -1.796875, + -0.384765625, + 0.89453125, + -2.15625, + 1.46875, + 2.671875, + -0.79296875, + -0.50390625, + -0.00433349609375, + -1.828125, + 0.146484375, + 1.390625, + 0.890625, + 5.40625, + 1.15625, + 2.796875, + -0.52734375, + 1.34375, + 0.228515625, + -0.92578125, + -1.8359375, + 1.4296875, + -2.046875, + -2.75, + -0.1640625, + 2.640625, + 0.1689453125, + 0.423828125, + -2.390625, + -1.75, + -1.875, + 1.0234375, + 5.4375, + 1.4140625, + -0.4609375, + 2.34375, + 2.359375, + 0.373046875, + -0.68359375, + 0.8984375, + 1.7734375, + 3.21875, + -3.03125, + -1.234375, + 1.671875, + 1.9140625, + -1.1015625, + -1.234375, + 0.54296875, + -2.015625, + -0.61328125, + -0.369140625, + 0.279296875, + 1.03125, + 0.08544921875, + -0.69921875, + -0.81640625, + 0.61328125, + 1.0078125, + 3.28125, + 3.0, + 1.6484375, + 0.73046875, + 2.859375, + -3.796875, + -0.40625, + 1.890625, + -0.048095703125, + -0.30859375, + 0.296875, + -0.365234375, + -1.8984375, + 0.453125, + 1.2109375, + -2.015625, + 0.78515625, + 4.40625, + 0.298828125, + 0.84765625, + 1.625, + -1.421875, + 0.58203125, + 1.34375, + 1.421875, + 0.37890625, + -1.671875, + 1.90625, + 1.390625, + -1.453125, + 1.265625, + 0.65234375, + -3.625, + -0.640625, + 0.70703125, + -0.54296875, + 0.921875, + -2.984375, + 1.4375, + 0.92578125, + 0.94921875, + 3.6875, + 1.8828125, + -1.6875, + -0.0184326171875, + 1.71875, + 1.8828125, + 1.2109375, + 0.166015625, + 0.412109375, + -0.4609375, + -3.421875, + 0.625, + 1.671875, + 3.03125, + -1.015625, + -3.5, + 
1.484375, + 0.357421875, + -0.1552734375, + 1.515625, + -2.0, + 0.82421875, + 0.087890625, + 0.205078125, + -1.1484375, + 0.74609375, + 2.796875, + 0.83203125, + -1.984375, + -0.37109375, + -2.25, + -0.1611328125, + 2.296875, + -1.4765625, + -1.9609375, + -6.09375, + 2.9375, + 2.578125, + -1.6875, + -0.6328125, + 0.8671875, + 1.2421875, + -2.390625, + -0.88671875, + 2.03125, + -2.71875, + 0.373046875, + 0.55859375, + -0.69140625, + 1.03125, + -1.1171875, + -1.765625, + -1.78125, + 1.328125, + -3.703125, + -1.1953125, + -1.515625, + 1.859375, + -4.9375, + -1.8359375, + 1.328125, + 1.9375, + 0.27734375, + 0.0986328125, + -0.53125, + -1.46875, + -2.78125, + 1.09375, + -2.0625, + 0.02685546875, + -8.375, + -2.046875, + 0.3359375, + 1.0625, + 2.390625, + -1.78125, + 1.9765625, + -1.9375, + -1.6171875, + 0.421875, + -1.6171875, + -0.96484375, + -4.34375, + -1.9453125, + -1.578125, + 0.1376953125, + -0.27734375, + -1.2578125, + -1.3828125, + 2.40625, + 0.56640625, + -1.265625, + 0.51953125, + -2.78125, + -1.6875, + 2.296875, + -3.125, + -0.146484375, + -3.109375, + 0.78125, + 0.41015625, + 1.1875, + 2.03125, + 0.640625, + -0.1708984375, + -1.796875, + 1.734375, + 1.578125, + -0.484375, + -0.69140625, + -4.375, + 3.078125, + 1.234375, + 1.5703125, + 3.984375, + 1.5390625, + 1.0703125, + 1.4140625, + 3.828125, + 0.404296875, + -4.71875, + 7.34375, + -2.078125, + 0.2001953125, + -2.8125, + -1.7109375, + 0.0224609375, + -1.96875, + 4.96875, + -2.8125, + -2.0, + 0.39453125, + 0.99609375, + 2.25, + -1.640625, + 0.76953125, + -0.86328125, + -1.375, + -3.265625, + 2.125, + 1.3203125, + 1.2578125, + 1.8125, + 1.4765625, + 2.9375, + -0.3515625, + 0.396484375, + 1.9453125, + -0.3828125, + -2.0, + 1.359375, + 0.6015625, + 2.875, + -2.359375, + -1.578125, + -1.3671875, + -0.369140625, + 2.578125, + -2.671875, + 0.040771484375, + 2.046875, + 3.65625, + -2.03125, + -0.283203125, + 0.1337890625, + 1.8671875, + -3.09375, + 3.609375, + 2.390625, + -0.416015625, + -0.08837890625, + 
-1.2734375, + -0.490234375, + 2.484375, + 0.00714111328125, + 0.9296875, + 0.50390625, + 1.328125, + 1.109375, + -1.8984375, + -2.484375, + -0.96875, + 0.6171875, + -1.5546875, + -1.546875, + -0.65625, + 1.7734375, + 1.7890625, + -2.125, + 4.34375, + 2.09375, + 2.078125, + 0.050537109375, + -1.8515625, + 2.1875, + -0.50390625, + 0.97265625, + -5.3125, + -0.05615234375, + 3.234375, + 0.15625, + 1.1953125, + 2.359375, + 2.203125, + -1.484375, + -0.4375, + 2.109375, + -1.859375, + -0.890625, + 1.3828125, + -0.89453125, + 0.5078125, + -1.828125, + -0.421875, + 1.28125, + -0.47265625, + 2.328125, + 1.2265625, + -0.220703125, + -0.71484375, + -0.018798828125, + -0.35546875, + -2.71875, + 1.953125, + 4.34375, + -1.2734375, + 3.328125, + 2.328125, + -0.357421875, + -0.140625, + 1.1875, + -0.54296875, + 0.2451171875, + -0.2060546875, + -0.796875, + -2.21875, + 1.703125, + 0.90234375, + 1.09375, + 0.89453125, + 0.62890625, + -0.2578125, + 2.625, + -1.640625, + -0.9375, + -1.0078125, + -0.4296875, + -0.439453125, + 1.78125, + -1.5390625, + -1.3671875, + 3.875, + -1.515625, + 1.359375, + -3.375, + 1.28125, + 1.671875, + 2.3125, + 5.25, + 1.109375, + 1.5, + 2.328125, + -2.390625, + -0.01318359375, + -1.0546875, + 0.765625, + 1.015625, + -1.8828125, + 2.375, + 0.74609375, + 1.84375, + -5.25, + -0.890625, + -4.90625, + -1.0, + -0.84375, + -2.34375, + 1.0703125, + -0.8671875, + 1.109375, + -0.5703125, + -0.80859375, + 1.171875, + -1.7734375, + -0.201171875, + 0.96484375, + -1.03125, + -0.28515625, + -0.3125, + -1.34375, + 4.625, + 2.53125, + -1.1171875, + -2.03125, + 3.484375, + -0.341796875, + -1.078125, + 1.5859375, + 1.6875, + -1.4140625, + 1.9296875, + 2.875, + 0.115234375, + -2.0, + -0.1962890625, + -1.453125, + 4.0, + -3.171875, + -1.0859375, + -1.7734375, + -0.357421875, + -1.3515625, + -2.453125, + 2.3125, + -3.015625, + -1.203125, + -1.8125, + 4.3125, + 0.373046875, + -2.3125, + -0.93359375, + -5.0, + -0.65625, + -2.375, + 2.5625, + 0.125, + 2.328125, + -1.328125, + 2.25, 
+ -1.8984375, + 0.6015625, + -1.75, + 1.7421875, + -1.5078125, + -1.4140625, + -1.3046875, + -0.384765625, + -4.40625, + -1.234375, + 2.28125, + 1.4296875, + -0.94140625, + -1.5234375, + 2.171875, + -1.3359375, + 0.84765625, + -0.482421875, + -1.78125, + 5.40625, + -2.140625, + -0.1796875, + -0.091796875, + -0.50390625, + -2.359375, + -1.1015625, + 2.125, + 0.97265625, + 2.1875, + 0.6171875, + -0.52734375, + -0.76171875, + 0.390625, + -0.052734375, + -3.0, + 2.796875, + -0.1923828125, + -0.216796875, + 1.1875, + 0.60546875, + -1.9609375, + 2.75, + 0.66796875, + -1.140625, + 0.7109375, + -0.89453125, + 0.61328125, + 0.5078125, + 0.875, + -1.203125, + 1.9609375, + 0.7890625, + 0.46875, + 0.0, + 2.171875, + 0.51953125, + -0.96484375, + 4.6875, + -0.484375, + 3.265625, + 0.052734375, + -0.27734375, + 0.51953125, + 0.349609375, + -1.71875, + -0.1044921875, + 0.349609375, + 0.57421875, + 0.88671875, + -0.60546875, + -0.76171875, + -0.890625, + 0.38671875, + -0.99609375, + -0.22265625, + 1.15625, + -2.9375, + 0.80078125, + 3.046875, + 0.62109375, + -0.7578125, + 2.421875, + -0.94140625, + 0.74609375, + 3.875, + 0.62109375, + 1.28125, + 0.416015625, + -0.96484375, + -1.8984375, + 0.671875, + 0.455078125, + 4.09375, + -1.9296875, + -2.125, + -2.40625, + -4.71875, + -1.4296875, + -0.201171875, + -1.1796875, + -0.1748046875, + 0.703125, + -1.3671875, + 0.486328125, + -2.140625, + 1.7578125, + 0.0079345703125, + 0.65234375, + -0.18359375, + 0.7421875, + -1.3671875, + 2.328125, + -0.11328125, + 0.6015625, + 0.2490234375, + -0.283203125, + 1.5859375, + -0.322265625, + -2.21875, + 2.1875, + 2.3125, + 1.328125, + -0.45703125, + -0.3828125, + -0.8359375, + 2.171875, + 0.1767578125, + -2.125, + 3.734375, + 1.4609375, + 0.89453125, + 0.7265625, + 0.212890625, + -0.4765625, + -0.6796875, + 6.0, + -1.4140625, + 0.388671875, + 3.03125, + 0.56640625, + 1.1484375, + 0.98828125, + -2.265625, + -2.296875, + 2.40625, + 1.5546875, + 1.5546875, + 0.92578125, + 1.3359375, + 0.451171875, + 
1.765625, + -2.59375, + -1.1953125, + 0.75390625, + -4.34375, + 0.71875, + 0.369140625, + 0.796875, + -1.125, + -0.56640625, + -1.1328125, + -1.7109375, + -1.2890625, + 4.625, + 1.515625, + -0.498046875, + -3.34375, + 2.921875, + 1.421875, + -1.6015625, + -0.91796875, + 4.46875, + -0.05517578125, + 1.078125, + -1.7265625, + -0.65234375, + 1.421875, + -3.921875, + -0.85546875, + 2.75, + 0.2353515625, + 0.25, + 2.3125, + 0.16015625, + 2.03125, + -4.25, + -0.8828125, + 2.53125, + 1.8515625, + -6.34375, + 0.96875, + 4.90625, + 0.1533203125, + 0.9375, + -0.17578125, + -3.578125, + 1.953125, + -3.25, + -3.34375, + 1.7109375, + 0.412109375, + 1.078125, + 1.0859375, + 3.0, + -1.90625, + -1.359375, + 2.9375, + 2.171875, + -0.08740234375, + -1.28125, + -3.078125, + 3.203125, + -1.5078125, + 0.255859375, + 5.21875, + -2.828125, + -1.53125, + -1.3359375, + -0.65625, + -0.267578125, + 7.53125, + 2.15625, + 1.4765625, + 0.2001953125, + -1.3828125, + -1.265625, + -7.65625, + -0.9375, + -0.224609375, + 1.5625, + -0.55859375, + 0.7421875, + 4.15625, + -0.08251953125, + -2.859375, + -0.9765625, + -0.0361328125, + -1.8515625, + 0.09375, + -1.3828125, + 0.423828125, + 1.7890625, + -1.6796875, + 1.8359375, + -0.1904296875, + 3.1875, + 9.4375, + -0.484375, + -1.25, + -2.375, + 0.240234375, + 0.07421875, + 1.109375, + -3.15625, + -0.302734375, + 6.15625, + -1.4765625, + 2.546875, + -1.6953125, + 1.96875, + 0.81640625, + 0.291015625, + -0.88671875, + 0.16796875, + 0.228515625, + -3.703125, + 0.06982421875, + -0.51171875, + -0.65625, + 0.318359375, + 1.84375, + 1.8984375, + -0.85546875, + -0.72265625, + -1.265625, + -2.6875, + -2.875, + 0.51171875, + 1.3046875, + -1.625, + -1.5, + -0.64453125, + 1.3046875, + 6.28125, + 1.3515625, + -1.0859375, + -1.265625, + -1.6875, + 4.5, + -1.90625, + 0.8046875, + -1.6953125, + 1.6484375, + 0.162109375, + 2.5, + -0.1865234375, + 2.640625, + 1.7421875, + 1.1953125, + -0.380859375, + -0.62890625, + 1.953125, + 2.765625, + 0.5390625, + -0.353515625, + 
-2.46875, + -0.61328125, + -0.00396728515625, + -3.921875, + 0.09375, + -0.40234375, + 1.0859375, + -0.283203125, + -0.038818359375, + -1.3359375, + -0.384765625, + 0.98046875, + 0.2314453125, + -0.0673828125, + 1.3359375, + 0.80859375, + 1.125, + 1.625, + 1.390625, + -1.5, + 2.375, + -0.88671875, + -2.203125, + 1.03125, + 2.234375, + 1.8671875, + -0.4375, + 0.71484375, + -0.6171875, + -1.625, + -4.125, + -0.73828125, + 1.1015625, + -0.0810546875, + 3.28125, + -1.3984375, + 0.2119140625, + 3.703125, + -0.9453125, + -0.2353515625, + 0.82421875, + -3.328125, + -0.78125, + 0.34375, + 0.7734375, + 1.984375, + -0.255859375, + 1.0625, + 0.546875, + -0.4296875, + -0.65625, + -1.34375, + -3.078125, + 1.84375, + -1.3046875, + 0.734375, + 2.078125, + -1.140625, + 2.140625, + 0.341796875, + -0.74609375, + -0.91015625, + -3.65625, + -3.015625, + 2.328125, + -3.421875, + -0.029541015625, + 0.7890625, + 2.125, + -2.71875, + 1.5546875, + 1.265625, + -1.640625, + 1.2578125, + -0.01190185546875, + -2.359375, + 2.28125, + -0.6015625, + 0.94921875, + 1.625, + -10.1875, + -0.2119140625, + 2.421875, + -0.421875, + 1.6328125, + -0.462890625, + -2.859375, + -0.9375, + -1.59375, + 0.10546875, + -7.0, + -0.78515625, + 2.8125, + 1.109375, + 2.15625, + 1.6796875, + -2.046875, + -2.75, + -1.359375, + 0.423828125, + 0.30078125, + 0.314453125, + -0.9140625, + 3.609375, + -1.5234375, + 1.3984375, + -1.515625, + 2.46875, + -2.234375, + 2.25, + -0.220703125, + 0.84375, + -3.203125, + -3.390625, + -1.09375, + 0.546875, + 0.255859375, + -0.5625, + 0.69921875, + 1.8125, + 0.7265625, + 4.15625, + 0.58984375, + -1.3515625, + 0.6171875, + 0.8515625, + -1.671875, + -1.1015625, + -0.609375, + 3.03125, + 1.15625, + 0.193359375, + -2.734375, + -1.765625, + 0.69921875, + 1.09375, + -3.515625, + 1.9375, + 2.65625, + 0.65234375, + 2.5, + 1.484375, + -0.87890625, + 1.46875, + 2.4375, + 1.9765625, + -0.67578125, + 3.171875, + 3.25, + 3.234375, + 0.298828125, + -4.03125, + -2.5625, + 0.46484375, + 0.130859375, + 
0.197265625, + 2.421875, + -1.2421875, + -2.03125, + -3.515625, + -5.5625, + 0.78125, + -5.71875, + -0.76953125, + 1.1484375, + 0.75390625, + 1.734375, + -2.1875, + 0.330078125, + -1.0703125, + -0.19921875, + -3.3125, + 2.1875, + -4.34375, + 1.4765625, + -0.365234375, + 0.51953125, + -0.34765625, + -3.25, + -1.796875, + -0.24609375, + 2.734375, + -1.0234375, + 0.8359375, + -0.609375, + -0.6015625, + -1.5859375, + -2.140625, + 3.71875, + 2.46875, + 0.39453125, + 0.7734375, + -0.3515625, + 0.515625, + 1.3515625, + -3.09375, + -2.5, + -1.7734375, + 1.3046875, + -0.140625, + -1.75, + -1.3984375, + -1.7421875, + -2.71875, + 4.09375, + -2.4375, + 0.7109375, + -4.1875, + -1.671875, + -0.82421875, + 2.375, + -0.828125, + -1.2109375, + -1.140625, + -3.234375, + -0.30859375, + 1.3046875, + -2.921875, + -1.015625, + -2.828125, + -0.4765625, + 0.2275390625, + -1.2890625, + -2.46875, + -0.6328125, + -0.2158203125, + 3.484375, + 0.1708984375, + -3.015625, + 1.8984375, + 0.69921875, + 2.65625, + 0.734375, + -0.9765625, + -0.058837890625, + 2.46875, + 1.6171875, + 3.25, + -2.84375, + -0.51171875, + 4.4375, + -2.140625, + -0.455078125, + 0.208984375, + -0.51953125, + -1.46875, + 1.7421875, + 0.130859375, + 0.9609375, + -0.671875, + 2.828125, + -4.625, + 0.9921875, + -0.2060546875, + -3.203125, + -0.8828125, + -0.392578125, + 0.515625, + -1.2578125, + 2.109375, + -3.28125, + 0.984375, + -0.18359375, + 0.65234375, + 0.86328125, + 4.8125, + -1.0546875, + -1.4609375, + 1.3828125, + -0.54296875, + -1.3828125, + -0.68359375, + -0.4921875, + -1.7734375, + -2.0, + -2.84375, + -0.322265625, + 0.34765625, + 1.359375, + 5.1875, + -2.921875, + 1.359375, + 1.09375, + -1.09375, + 0.2021484375, + -2.296875, + -5.25, + 2.671875, + 0.7578125, + -0.9921875, + 1.8359375, + -1.4921875, + -4.0625, + 1.6875, + 0.57421875, + 3.0625, + -1.1953125, + -2.203125, + 0.51953125, + 1.8203125, + -2.359375, + -1.390625, + -0.279296875, + 2.46875, + -0.294921875, + -2.125, + 0.7734375, + 2.53125, + -0.51953125, + 
3.046875, + -1.1328125, + -2.71875, + -0.484375, + -0.859375, + 3.3125, + -0.8125, + 2.953125, + 0.82421875, + -1.6328125, + 1.2265625, + -1.859375, + 1.453125, + 1.6484375, + -0.08642578125, + 0.59765625, + -2.359375, + -0.369140625, + 3.84375, + 0.10888671875, + -3.875, + -0.271484375, + -0.77734375, + -2.515625, + -0.94921875, + 4.8125, + 2.125, + 0.55859375, + -1.8125, + -4.15625, + 2.203125, + 1.3671875, + -1.8828125, + 0.5546875, + 4.625, + 0.83203125, + 2.5, + -0.9375, + 0.376953125, + 0.107421875, + -2.65625, + -1.3359375, + 0.87109375, + -1.1796875, + 0.6796875, + -5.6875, + -3.375, + -0.16015625, + 1.390625, + 1.3828125, + 0.91796875, + -1.703125, + 0.5, + -0.10693359375, + 3.625, + 0.08154296875, + 2.1875, + 0.0, + -1.7265625, + 2.390625, + -2.234375, + 2.125, + 0.6875, + -2.015625, + -2.140625, + -0.453125, + 0.0556640625, + -1.09375, + 0.921875, + 1.7578125, + 0.625, + 0.81640625, + -11.5625, + 0.373046875, + -2.125, + -1.0546875, + 3.625, + 1.1328125, + 1.421875, + 1.3359375, + 1.3515625, + -0.2041015625, + 1.0625, + 1.671875, + 1.546875, + 2.59375, + 0.1513671875, + -1.046875, + 1.390625, + -1.5234375, + -0.94921875, + 0.7734375, + -2.265625, + 2.40625, + -0.671875, + 0.04052734375, + -0.494140625, + -0.6171875, + -4.40625, + 2.203125, + -1.9375, + 1.5625, + 0.1142578125, + -1.109375, + -3.046875, + 2.546875, + -1.25, + -1.6796875, + -1.375, + -0.10888671875, + 0.546875, + 2.453125, + 0.9453125, + 1.921875, + -3.921875, + 1.1953125, + -1.1328125, + -0.345703125, + 1.40625, + 2.359375, + -2.03125, + -4.25, + -4.375, + -0.31640625, + 1.2890625, + -0.482421875, + 1.0625, + 0.44921875, + 1.7421875, + 1.5234375, + -1.3828125, + 4.65625, + -2.21875, + 3.34375, + 1.453125, + 3.171875, + -0.427734375, + 2.28125, + 2.34375, + 2.578125, + 0.78125, + -0.36328125, + 1.484375, + -5.125, + 0.5859375, + 2.703125, + 0.10791015625, + -1.515625, + 0.2265625, + 1.4296875, + 0.130859375, + 3.09375, + 0.55078125, + -1.1640625, + -0.49609375, + -2.03125, + -0.04052734375, 
+ -3.0, + -0.5, + 1.2890625, + 1.1015625, + 2.78125, + 0.0703125, + 0.73828125, + 1.296875, + -0.294921875, + 0.44140625, + 0.30078125, + -1.1484375, + 0.384765625, + 3.484375, + -2.3125, + 0.80859375, + -1.75, + 0.09912109375, + 0.59765625, + -1.1328125, + -0.458984375, + -2.375, + -1.1640625, + 1.7265625, + -2.953125, + -1.234375, + 1.484375, + 1.625, + -2.125, + -1.265625, + 0.0250244140625, + -2.71875, + -0.91015625, + -2.375, + -2.015625, + -1.3046875, + -1.0078125, + 2.4375, + -0.796875, + 2.078125, + 0.2412109375, + 3.84375, + 0.87109375, + 2.25, + -3.84375, + -1.34375, + -2.390625, + -0.87109375, + 2.515625, + -0.55078125, + 0.7890625, + 1.5859375, + -0.59765625, + 1.90625, + 0.35546875, + -0.4140625, + 3.71875, + -1.4296875, + -0.439453125, + 1.078125, + 1.1953125, + 1.15625, + 0.63671875, + 0.7109375, + -1.5, + 0.61328125, + -0.640625, + -1.2734375, + 2.09375, + -1.25, + 4.5, + -2.578125, + 1.0703125, + 1.4375, + -1.75, + 1.15625, + 1.8125, + 1.5859375, + -1.3828125, + -0.51171875, + 0.0, + 1.265625, + -0.25, + 1.109375, + 0.57421875, + -4.625, + -0.375, + -0.4765625, + -1.9375, + 1.2109375, + -2.671875, + -2.609375, + -0.57421875, + -0.71484375, + -0.77734375, + 0.08056640625, + 0.2578125, + 0.8125, + 0.498046875, + 0.208984375, + 1.734375, + 0.69140625, + -1.3203125, + 1.0625, + -1.15625, + -3.609375, + -0.30078125, + -1.21875, + 1.4921875, + -2.546875, + 2.8125, + -5.4375, + 0.59765625, + -1.3671875, + -0.412109375, + -3.125, + 0.443359375, + -2.53125, + 5.4375, + 1.875, + -0.62109375, + 0.5390625, + -1.9140625, + 0.65234375, + 0.01611328125, + -0.1728515625, + -1.203125, + -0.6875, + -0.9296875, + 0.06982421875, + -2.78125, + 1.1796875, + 0.765625, + 1.4765625, + -2.65625, + -3.671875, + -2.1875, + -1.921875, + 0.56640625, + -0.859375, + 1.2109375, + 4.59375, + -1.15625, + 1.9609375, + -4.4375, + 0.95703125, + 0.470703125, + -1.984375, + -0.59375, + 1.2109375, + -0.46875, + -0.1318359375, + 1.8046875, + -2.5, + 1.0078125, + -4.59375, + -1.4765625, + 
0.6015625, + 4.59375, + 0.78125, + 2.65625, + 0.31640625, + 5.875, + 0.55859375, + -4.375, + 1.046875, + -1.5546875, + -1.7265625, + -0.416015625, + 0.796875, + 0.02490234375, + 1.2421875, + -1.0078125, + 1.0625, + -0.93359375, + 4.75, + -0.1552734375, + -0.416015625, + -2.328125, + -0.32421875, + -1.5078125, + 0.70703125, + -0.4609375, + -1.3515625, + -0.53515625, + -1.5390625, + 1.2109375, + -1.984375, + 1.3359375, + 1.8125, + -2.015625, + -1.2109375, + -2.734375, + -1.2578125, + 2.421875, + -2.09375, + 0.150390625, + -3.078125, + 1.046875, + -0.98046875, + 2.984375, + 1.1171875, + 1.5859375, + -0.89453125, + 2.4375, + -3.96875, + 1.109375, + 1.828125, + 1.6875, + 4.8125, + 0.458984375, + 0.035888671875, + -0.1044921875, + 0.357421875, + -0.87890625, + -0.4609375, + -1.6171875, + -0.6796875, + 1.2421875, + 3.484375, + -1.328125, + 0.23046875, + -1.8046875, + -0.76953125, + -1.4140625, + -3.078125, + 3.046875, + -0.62890625, + 1.7109375, + 0.6484375, + 3.453125, + 0.013916015625, + 2.140625, + 1.7734375, + 3.625, + 1.1171875, + 0.453125, + -3.59375, + 0.00408935546875, + -1.390625, + 3.375, + 0.87109375, + 2.078125, + 0.059814453125, + -1.7890625, + 1.3203125, + -0.1728515625, + 0.6875, + -1.8203125, + -1.765625, + -0.283203125, + 1.2421875, + 0.08154296875, + -2.796875, + 2.15625, + -2.25, + -2.265625, + 1.5859375, + 2.421875, + -1.578125, + 1.34375, + -2.515625, + 0.31640625, + 2.265625, + -0.015869140625, + -1.5625, + 0.859375, + -1.0078125, + -0.287109375, + -0.2041015625, + -0.150390625, + 0.7890625, + -3.03125, + 0.31640625, + -1.796875, + 0.8203125, + -0.08251953125, + 0.2421875, + 1.1171875, + 1.9296875, + -0.279296875, + -0.306640625, + 0.6328125, + 1.515625, + -0.5390625, + 1.2578125, + -0.2060546875, + -1.5859375, + 1.6484375, + 1.4765625, + -0.298828125, + -3.40625, + 4.15625, + 0.71875, + -1.046875, + 0.03271484375, + -0.1748046875, + 2.28125, + 1.8984375, + -0.5546875, + 0.14453125, + -0.01202392578125, + -1.9453125, + 0.69921875, + 2.265625, + 
-0.25390625, + 0.267578125, + 0.412109375, + -1.515625, + 1.609375, + -1.1953125, + -0.1806640625, + 2.125, + -1.8671875, + 1.5, + 0.439453125, + -3.046875, + -0.193359375, + -0.71875, + 3.640625, + -2.078125, + 1.90625, + -1.1953125, + -7.9375, + -0.60546875, + 1.9375, + 0.423828125, + -1.71875, + -1.0390625, + -1.859375, + 1.4765625, + -1.6953125, + -1.1171875, + -5.375, + 3.4375, + -3.53125, + 1.1640625, + 1.1640625, + 1.1328125, + 2.1875, + -1.140625, + -0.99609375, + 1.453125, + 0.65234375, + 1.375, + 2.28125, + 1.1796875, + 0.75, + -0.99609375, + -3.796875, + -0.95703125, + -0.404296875, + 1.1484375, + -2.734375, + -1.6875, + -1.0, + 1.96875, + -1.1796875, + -2.078125, + -1.0078125, + 1.0859375, + 0.8828125, + 0.76171875, + 1.8671875, + 2.515625, + 4.3125, + 12.5, + 0.5234375, + -0.5390625, + -0.1279296875, + -1.390625, + 1.09375, + -0.6640625, + 2.25, + -0.341796875, + 1.375, + -2.15625, + 0.859375, + 0.953125, + -0.427734375, + -1.7421875, + -0.392578125, + -0.671875, + 1.1953125, + 0.90234375, + 1.34375, + -2.421875, + -0.279296875, + 2.203125, + -0.2734375, + 2.015625, + 3.03125, + 1.09375, + 3.296875, + -1.0546875, + 4.84375, + -0.81640625, + 0.1708984375, + -0.96875, + -1.3203125, + 0.265625, + -0.99609375, + 1.7734375, + -1.2734375, + 2.296875, + 3.703125, + -0.9296875, + 0.29296875, + -2.109375, + 3.046875, + 1.046875, + 0.73046875, + 0.6015625, + -2.171875, + -0.09521484375, + 0.74609375, + -4.84375, + -2.46875, + -1.3984375, + 2.015625, + 1.2265625, + -1.0390625, + -3.203125, + -1.296875, + -0.038818359375, + -0.2080078125, + -0.13671875, + -2.125, + -2.125, + -2.421875, + 0.65234375, + -0.1982421875, + 1.765625, + -0.70703125, + 1.359375, + 1.234375, + -0.451171875, + -3.09375, + 2.46875, + 1.8984375, + -0.4296875, + 0.185546875, + 2.03125, + 1.375, + 0.83984375, + 3.390625, + 2.125, + 0.0595703125, + -0.5078125, + 1.0859375, + -1.25, + 2.953125, + -1.0625, + -2.9375, + 1.125, + -2.59375, + 0.142578125, + -0.9765625, + -1.2734375, + 1.765625, + 
1.078125, + 3.75, + 0.9921875, + -1.09375, + 2.265625, + 1.609375, + -1.75, + -0.431640625, + 0.1689453125, + 0.66015625, + 0.92578125, + 2.625, + -0.71484375, + 0.34765625, + 1.1015625, + 1.5234375, + 2.234375, + 2.78125, + 0.50390625, + 1.4453125, + 1.984375, + 0.369140625, + 5.4375, + -0.4921875, + -1.5703125, + 0.734375, + -2.171875, + 0.94140625, + -1.6953125, + 0.455078125, + 0.08837890625, + -3.0, + -0.6484375, + -0.349609375, + -2.609375, + -2.109375, + -0.9296875, + -1.890625, + 0.69921875, + -0.66015625, + 0.46484375, + -0.8203125, + -1.390625, + -1.65625, + 3.78125, + -3.265625, + -0.5234375, + -2.65625, + -2.46875, + 1.46875, + -2.984375, + 0.2177734375, + -0.8046875, + 2.8125, + 0.36328125, + 0.78515625, + 0.25, + 0.1279296875, + -1.5625, + -0.9453125, + 0.15625, + 4.03125, + -0.369140625, + -2.75, + -1.0, + -1.484375, + -6.5, + 2.21875, + 0.95703125, + -0.5703125, + -1.7421875, + -2.390625, + 0.8828125, + -0.515625, + -0.453125, + 1.734375, + -0.84765625, + 1.0390625, + 0.76171875, + -1.8046875, + 3.21875 + ], + "index": 0, + "object": "embedding", + "raw_output": null + } + ], + "model": "accounts/fireworks/models/qwen3-embedding-8b", + "object": "list", + "usage": { + "prompt_tokens": 9, + "total_tokens": 9, + "completion_tokens": 0 + }, + "perf_metrics": null + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/bebc02ac1fb5.json b/tests/integration/recordings/responses/bebc02ac1fb5.json new file mode 100644 index 000000000..cae485bf1 --- /dev/null +++ b/tests/integration/recordings/responses/bebc02ac1fb5.json @@ -0,0 +1,415 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_88k1yds9", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\": true, \"liquid_name\": \"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_88k1yds9", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius", + "default": true + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": 
null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": 
"assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-425", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759427015, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/c7ff69e043ea.json 
b/tests/integration/recordings/responses/c7ff69e043ea.json new file mode 100644 index 000000000..91365adf6 --- /dev/null +++ b/tests/integration/recordings/responses/c7ff69e043ea.json @@ -0,0 +1,389 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_e17msgo0", + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "arguments": "{\"celcius\": false, \"liquid_name\": \"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_e17msgo0", + "content": "-212" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius", + "default": true + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", 
+ "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + 
"content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": "212", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-316", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759427031, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/d3fc756ea885.json b/tests/integration/recordings/responses/d3fc756ea885.json new file mode 100644 index 000000000..aec8b4506 --- /dev/null +++ b/tests/integration/recordings/responses/d3fc756ea885.json @@ -0,0 +1,415 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_gcyfwdi7", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\": true, \"liquid_name\": \"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_gcyfwdi7", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius", + "default": true + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], 
+ "created": 1759427018, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427018, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427018, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427018, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": 
"assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + 
"delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-820", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759427019, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/afaacb433b7c.json b/tests/integration/recordings/responses/e11745e75e87.json similarity index 94% rename from tests/integration/recordings/responses/afaacb433b7c.json rename to tests/integration/recordings/responses/e11745e75e87.json index 9b54050db..ddcdbc54e 100644 --- a/tests/integration/recordings/responses/afaacb433b7c.json +++ b/tests/integration/recordings/responses/e11745e75e87.json @@ -15,7 +15,7 @@ "content": "What is the boiling point of the liquid polyjuice in celsius?" 
} ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "tool_choice": "required", @@ -55,7 +55,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-867", + "id": "chatcmpl-105", "choices": [ { "delta": { @@ -66,7 +66,7 @@ "tool_calls": [ { "index": 0, - "id": "call_d952bbyw", + "id": "call_gcyfwdi7", "function": { "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", "name": "get_boiling_point" @@ -80,7 +80,7 @@ "logprobs": null } ], - "created": 1759368378, + "created": 1759427018, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -91,7 +91,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-867", + "id": "chatcmpl-105", "choices": [ { "delta": { @@ -106,7 +106,7 @@ "logprobs": null } ], - "created": 1759368378, + "created": 1759427018, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/e3bded498c54.json b/tests/integration/recordings/responses/e3bded498c54.json new file mode 100644 index 000000000..60089c43b --- /dev/null +++ b/tests/integration/recordings/responses/e3bded498c54.json @@ -0,0 +1,4137 @@ +{ + "request": { + "method": "POST", + "url": "https://api.fireworks.ai/inference/v1/v1/embeddings", + "headers": {}, + "body": { + "model": "accounts/fireworks/models/qwen3-embedding-8b", + "input": [ + "This is a test file 2" + ], + "encoding_format": "float" + }, + "endpoint": "/v1/embeddings", + "model": "accounts/fireworks/models/qwen3-embedding-8b" + }, + "response": { + "body": { + "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse", + "__data__": { + "data": [ + { + "embedding": [ + 3.65625, + 0.7890625, + -2.625, + -0.91015625, + 2.0625, + -1.703125, + 0.53515625, + 2.921875, + -4.09375, + 1.625, + -1.4375, + -1.6875, + 6.96875, 
+ -1.0859375, + 3.765625, + 0.1416015625, + 1.0546875, + 2.140625, + -0.2021484375, + 2.953125, + -1.46875, + 2.859375, + 3.34375, + -1.5859375, + 2.4375, + 1.265625, + -1.7734375, + -2.5, + 0.6015625, + 1.1640625, + -2.078125, + 3.96875, + 1.828125, + 4.375, + -4.3125, + 1.34375, + -2.390625, + -3.515625, + -2.734375, + 1.3671875, + -2.703125, + -2.171875, + 1.3515625, + 1.2578125, + -2.21875, + -1.5546875, + -2.703125, + -3.09375, + 1.34375, + 1.3046875, + 0.70703125, + -1.7265625, + -0.875, + 1.609375, + 1.625, + 1.8046875, + 1.3515625, + -0.27734375, + -3.4375, + -0.9609375, + -5.34375, + -1.9609375, + 3.25, + 0.388671875, + 2.421875, + 1.7734375, + 1.0703125, + 1.203125, + 1.609375, + -1.9765625, + -0.08935546875, + 2.546875, + 1.9453125, + 0.020263671875, + -0.796875, + -4.0625, + -4.125, + -0.1455078125, + 2.84375, + 3.203125, + 0.51953125, + -4.09375, + 0.0240478515625, + -1.8359375, + -1.375, + -2.453125, + -1.8203125, + -1.1875, + -0.275390625, + -0.036376953125, + 2.453125, + 0.92578125, + -0.38671875, + 1.8125, + -0.41015625, + 0.35546875, + -3.25, + -2.609375, + -2.875, + 3.796875, + 1.671875, + 4.40625, + 2.734375, + 1.34375, + -0.76171875, + 1.9296875, + -1.015625, + -0.671875, + -1.453125, + -1.6953125, + 0.041259765625, + 2.109375, + 2.828125, + -0.09814453125, + -1.453125, + -0.08349609375, + 2.53125, + 1.25, + 1.0, + 1.8359375, + 3.375, + -0.94140625, + -2.28125, + 2.421875, + 3.125, + -2.71875, + 1.484375, + -1.1796875, + 0.1953125, + -1.1171875, + 1.53125, + -4.375, + 2.9375, + -2.375, + -1.5625, + 0.36328125, + -3.640625, + 0.0242919921875, + 4.21875, + -0.42578125, + -2.78125, + 1.8359375, + 3.265625, + -1.015625, + 0.90625, + 3.84375, + -1.3203125, + 3.328125, + 2.65625, + 1.9375, + 0.7421875, + 0.8046875, + -1.96875, + -2.203125, + -1.9609375, + -2.65625, + -2.53125, + 0.5546875, + 2.359375, + -0.05712890625, + -1.4921875, + 0.671875, + 2.3125, + 1.578125, + -0.6015625, + 0.376953125, + 2.78125, + 1.109375, + 1.578125, + -1.8046875, + 
1.0078125, + 0.059326171875, + 0.65625, + 1.5546875, + 0.61328125, + 1.1484375, + -2.125, + -1.4609375, + -0.462890625, + 3.625, + 0.044677734375, + 1.1015625, + -2.75, + 1.8828125, + 0.73828125, + 4.6875, + 0.12890625, + -2.5, + 0.61328125, + -0.828125, + -0.7421875, + 0.400390625, + -1.0859375, + 1.6875, + -2.90625, + 0.1484375, + 0.369140625, + 0.26953125, + 1.4609375, + -2.140625, + 6.46875, + -4.34375, + 2.21875, + -0.271484375, + -0.3203125, + -2.21875, + -1.46875, + 0.69921875, + 0.71875, + -3.609375, + 1.7265625, + -1.34375, + 1.203125, + 3.546875, + 1.2890625, + -1.6640625, + 5.15625, + 1.546875, + -1.7109375, + -0.06982421875, + 1.4140625, + 1.625, + 0.2890625, + 1.8515625, + 0.19140625, + -4.15625, + -1.421875, + -2.9375, + 0.74609375, + -2.75, + 1.546875, + -2.96875, + -0.283203125, + -1.15625, + 3.359375, + -3.765625, + -1.4140625, + -0.032470703125, + 0.5078125, + 0.451171875, + -1.4375, + -0.0849609375, + 0.56640625, + -1.328125, + -0.1630859375, + 1.7109375, + 2.1875, + -0.8125, + 5.28125, + -0.171875, + 0.2421875, + -5.5, + -1.3046875, + -0.6796875, + 1.4375, + -3.078125, + -2.078125, + -1.5234375, + -1.5859375, + 1.09375, + 0.5234375, + -4.34375, + 2.734375, + 1.6015625, + -0.85546875, + -1.421875, + -2.28125, + -1.9140625, + 1.4140625, + -2.359375, + 0.291015625, + -2.390625, + -0.1474609375, + -0.404296875, + 1.0703125, + -0.84765625, + 0.1494140625, + -1.3203125, + -3.046875, + 1.2421875, + -1.2578125, + 0.5390625, + 2.59375, + -1.625, + 2.0625, + -0.265625, + -0.78125, + 1.2109375, + -2.09375, + 2.5625, + -0.52734375, + 4.4375, + -0.115234375, + 2.671875, + -3.046875, + 4.625, + -3.578125, + -2.828125, + -1.6015625, + -0.6875, + -1.890625, + 0.0172119140625, + -1.9140625, + -0.419921875, + -0.75, + -1.8203125, + -3.25, + 2.640625, + 0.578125, + -0.6796875, + -2.921875, + -0.0198974609375, + -2.484375, + -0.91796875, + -1.5703125, + 1.734375, + 1.2734375, + -2.4375, + -1.4140625, + 2.0625, + -2.09375, + -0.625, + -0.234375, + 0.166015625, + 
0.55078125, + 0.166015625, + 0.453125, + -0.83984375, + -3.421875, + -1.1875, + 1.59375, + 0.5546875, + -3.75, + 2.046875, + -1.6875, + 0.56640625, + 1.84375, + -1.7578125, + 1.5703125, + 0.30078125, + -2.34375, + -2.15625, + 1.8984375, + 0.224609375, + 0.451171875, + -1.125, + 1.5625, + -3.53125, + -1.4921875, + -0.83984375, + 0.53515625, + 2.453125, + -0.474609375, + 0.703125, + 1.421875, + -0.2060546875, + -2.171875, + 3.078125, + 2.140625, + 2.5625, + -0.80859375, + 1.21875, + 0.33203125, + -1.796875, + 0.04150390625, + 4.65625, + 3.53125, + -0.9140625, + -3.390625, + -0.4765625, + 2.390625, + 0.73828125, + -1.921875, + -0.208984375, + -2.203125, + -2.28125, + -3.078125, + -1.15625, + 1.3203125, + -0.83203125, + -0.494140625, + -8.4375, + 0.77734375, + -0.416015625, + -0.134765625, + 1.1953125, + -3.078125, + -0.609375, + 0.953125, + 0.91796875, + -2.734375, + -0.5078125, + 0.10009765625, + -0.0966796875, + -1.9921875, + -1.203125, + 0.6953125, + 0.84765625, + -2.03125, + 1.84375, + -0.50390625, + -1.8046875, + -0.0927734375, + 2.671875, + 1.0, + -0.0966796875, + 1.6484375, + 0.058837890625, + -1.9453125, + 0.8046875, + 0.19140625, + 0.337890625, + 1.0078125, + -0.169921875, + 3.03125, + 2.984375, + 3.796875, + 3.96875, + -0.6640625, + 2.796875, + -0.59765625, + 1.8671875, + -1.84375, + -1.6171875, + -2.671875, + 2.09375, + 3.171875, + -0.10302734375, + 5.625, + -2.421875, + -0.44921875, + -0.2353515625, + -1.078125, + -0.1728515625, + 0.3515625, + -3.453125, + 1.40625, + 2.515625, + 0.63671875, + -0.1787109375, + 1.8984375, + 4.5625, + 2.0625, + 2.109375, + -1.2890625, + 1.6796875, + -2.71875, + 1.1796875, + 0.62109375, + -0.78515625, + -4.40625, + 3.5625, + -2.390625, + 2.875, + -0.271484375, + 0.064453125, + -2.9375, + -0.8359375, + -0.1044921875, + 1.5546875, + 0.40625, + -1.8828125, + 1.2734375, + -0.80078125, + 1.6796875, + -1.0546875, + -0.10498046875, + -0.578125, + -2.953125, + 2.09375, + -0.57421875, + 1.6640625, + -5.21875, + -0.94140625, + 
0.52734375, + 4.5625, + -1.3984375, + -0.515625, + -3.296875, + -3.828125, + 0.388671875, + -3.609375, + -2.0625, + -10.0625, + 1.9296875, + 2.125, + 1.34375, + -0.1708984375, + 0.625, + 0.9296875, + -3.03125, + 1.4140625, + 4.96875, + -0.6015625, + 1.6484375, + -1.8984375, + 1.71875, + -0.498046875, + 0.07958984375, + 0.859375, + 0.86328125, + -3.09375, + -0.267578125, + 0.451171875, + -2.234375, + 1.0390625, + 2.0625, + 0.8046875, + -2.46875, + -1.9375, + 1.2265625, + -3.5625, + -2.234375, + 0.8203125, + 0.2041015625, + -3.453125, + 0.75390625, + 0.375, + 0.921875, + 1.25, + 1.640625, + -0.392578125, + -6.875, + 0.1328125, + 0.671875, + 2.171875, + -1.6953125, + 0.99609375, + 0.67578125, + -1.0234375, + 0.482421875, + -1.296875, + 2.5, + -0.82421875, + -2.71875, + -0.4453125, + -1.21875, + -1.2734375, + 3.921875, + 1.0078125, + -1.78125, + 2.53125, + -2.15625, + -0.6796875, + -0.1611328125, + 5.65625, + 0.83203125, + 1.671875, + -0.49609375, + 4.875, + 1.609375, + 3.25, + 3.703125, + -0.76171875, + 1.0390625, + 1.7265625, + -0.84375, + 0.044677734375, + -1.53125, + 1.28125, + 1.1875, + -0.671875, + -0.609375, + 2.296875, + -2.625, + -3.984375, + -2.59375, + 1.140625, + 0.859375, + 1.5703125, + 2.109375, + 1.78125, + 1.359375, + 1.2109375, + 2.3125, + 2.0625, + -1.2265625, + 2.65625, + -2.390625, + -3.4375, + 3.53125, + -1.6484375, + -1.2578125, + -0.416015625, + 2.375, + -3.453125, + 2.40625, + -2.328125, + 1.8515625, + -0.6796875, + -1.4609375, + 0.55859375, + 2.671875, + -4.78125, + -1.203125, + 1.2890625, + -1.9765625, + -6.65625, + 0.796875, + 3.21875, + -0.4375, + -1.1640625, + 2.78125, + 0.359375, + -1.9453125, + 0.55078125, + 0.10205078125, + -2.203125, + 2.8125, + -6.8125, + -1.9921875, + 0.79296875, + -1.2421875, + -1.71875, + -4.28125, + -0.1943359375, + 0.0242919921875, + -0.482421875, + -0.345703125, + 3.734375, + -3.65625, + -3.640625, + 2.390625, + 0.462890625, + -1.8046875, + 2.5625, + 0.201171875, + 3.125, + -1.1953125, + -0.89453125, + -1.984375, 
+ 0.98828125, + 3.75, + -1.265625, + -2.4375, + 1.6015625, + 1.453125, + 1.5546875, + -2.875, + 3.4375, + 1.9921875, + 1.9609375, + 1.25, + 1.4140625, + 1.1640625, + 3.140625, + 0.60546875, + 2.296875, + -2.71875, + 2.90625, + 0.380859375, + -1.5390625, + 1.296875, + 1.1328125, + 1.890625, + -1.859375, + 1.2734375, + -0.85546875, + -0.423828125, + 0.4453125, + -1.3515625, + 0.255859375, + 0.169921875, + -2.78125, + -1.203125, + 0.466796875, + 0.70703125, + 0.4375, + 0.57421875, + -4.3125, + -2.109375, + 2.109375, + -1.59375, + 1.65625, + 2.96875, + -1.125, + 3.296875, + 1.3828125, + 0.8671875, + -3.8125, + 0.08154296875, + -1.9296875, + -5.25, + -1.828125, + 0.8828125, + -5.65625, + -2.703125, + 3.5625, + 1.5078125, + 1.7109375, + 1.3203125, + -3.25, + -2.109375, + 0.051513671875, + 1.375, + -0.890625, + 1.3828125, + -1.765625, + -1.9375, + 0.3828125, + 1.8515625, + 2.453125, + -1.4140625, + -2.046875, + -0.359375, + -2.421875, + -2.359375, + 2.5625, + -3.75, + -2.375, + -3.125, + -1.2109375, + -2.40625, + -0.734375, + 1.5546875, + -3.734375, + 1.25, + 2.78125, + -0.7578125, + -3.640625, + 4.4375, + -0.984375, + -0.94140625, + -0.89453125, + -0.9296875, + -4.6875, + 3.671875, + 3.53125, + 2.25, + -1.0390625, + -3.640625, + 0.46484375, + -1.90625, + 3.3125, + -3.25, + 2.71875, + 3.578125, + 0.0654296875, + -0.11083984375, + -0.71484375, + -1.015625, + 0.5078125, + -0.546875, + 1.75, + -0.2431640625, + -1.953125, + -0.43359375, + 1.3125, + 2.25, + -0.0576171875, + 1.1015625, + 0.03955078125, + -0.16796875, + -0.51171875, + 1.3515625, + 2.25, + -1.3515625, + -0.46484375, + 3.34375, + 1.5, + -4.90625, + 0.75390625, + -0.6875, + 1.015625, + -0.470703125, + 0.96484375, + 1.0703125, + 1.2265625, + 1.4453125, + -1.5234375, + 0.00396728515625, + -1.4375, + 0.2431640625, + 2.15625, + -2.625, + -1.4140625, + -2.25, + 0.73828125, + -1.2578125, + -0.42578125, + 0.061767578125, + 0.29296875, + 2.25, + 2.59375, + -0.1962890625, + 0.4296875, + -3.6875, + -1.0234375, + 1.03125, + 
2.921875, + -3.25, + 1.9140625, + 1.4296875, + 1.203125, + 0.78515625, + 0.439453125, + 2.875, + -1.328125, + 0.193359375, + 2.515625, + -0.0223388671875, + 1.84375, + -3.828125, + -0.515625, + -0.52734375, + 1.0, + 0.06591796875, + 0.51171875, + -0.177734375, + 0.228515625, + -2.109375, + -0.1962890625, + 0.0155029296875, + 0.8046875, + -0.69921875, + 0.024169921875, + -0.92578125, + -2.8125, + -0.78515625, + -1.03125, + -1.328125, + 0.65234375, + 2.53125, + 0.52734375, + 0.16015625, + 0.9453125, + 0.4921875, + 4.84375, + -0.10888671875, + 2.640625, + 2.875, + 0.294921875, + -1.8828125, + 2.59375, + -1.5546875, + -0.703125, + 1.2265625, + -2.125, + -0.8125, + -0.953125, + 1.890625, + -2.78125, + -4.15625, + -2.640625, + -1.6875, + 2.625, + -3.03125, + 0.25, + -0.58984375, + 2.890625, + 2.1875, + -3.984375, + 3.015625, + -1.7578125, + -0.94140625, + 1.234375, + 1.46875, + 0.1533203125, + -0.07470703125, + 1.703125, + 2.984375, + -0.37109375, + 1.3203125, + -0.8359375, + 0.271484375, + -0.5703125, + 0.482421875, + -0.625, + -4.625, + -4.53125, + 0.1337890625, + -0.75, + -1.25, + 0.76953125, + -1.015625, + -0.482421875, + -2.21875, + 0.8359375, + 1.75, + -3.90625, + -2.734375, + -1.875, + -0.5546875, + -0.4375, + 2.78125, + 3.046875, + -0.68359375, + 1.0859375, + 0.84765625, + -0.19921875, + 4.28125, + -1.1953125, + -1.8984375, + 1.0859375, + -0.498046875, + 1.484375, + -0.0947265625, + 0.47265625, + 2.078125, + 1.9609375, + -0.859375, + 0.72265625, + -0.1376953125, + -0.61328125, + 1.6328125, + 0.7421875, + -0.8203125, + -3.25, + -6.0625, + -0.3125, + 2.453125, + -1.1875, + -3.421875, + -1.09375, + -0.2451171875, + 0.6015625, + 2.125, + 0.353515625, + 5.0, + -0.70703125, + 1.8828125, + 4.125, + 1.0703125, + 1.578125, + 1.5, + -0.859375, + -0.84375, + 2.4375, + 0.515625, + -1.890625, + -3.359375, + 0.71875, + 0.77734375, + 0.97265625, + -2.125, + -0.025390625, + 0.890625, + -1.5234375, + 3.265625, + -0.91796875, + 0.84765625, + -1.5546875, + 1.9921875, + 0.82421875, 
+ -2.171875, + -0.59375, + -2.25, + -2.234375, + 0.609375, + -0.19921875, + 1.1328125, + -4.21875, + -4.09375, + 3.421875, + 2.421875, + -1.3984375, + 1.28125, + -0.88671875, + 0.48046875, + -0.92578125, + -4.53125, + -0.21484375, + 2.625, + -2.0, + 1.7109375, + -3.34375, + -0.6796875, + 1.78125, + -2.40625, + -1.1875, + -0.83203125, + -0.796875, + -2.421875, + -0.58203125, + -1.9765625, + -0.8515625, + 0.859375, + -2.078125, + -0.6953125, + -3.34375, + -0.8359375, + 0.2490234375, + 0.6796875, + 4.15625, + 2.03125, + 5.53125, + -0.140625, + -2.15625, + -1.546875, + 7.625, + 3.046875, + -0.8203125, + -0.046875, + 2.078125, + 4.9375, + -1.90625, + -0.44140625, + -1.2109375, + 5.5, + 3.078125, + 2.5625, + 3.484375, + -1.3515625, + -1.125, + -0.890625, + -1.09375, + -0.0240478515625, + -0.89453125, + 2.984375, + -0.25390625, + -0.70703125, + 1.9375, + -0.33984375, + 3.25, + -1.25, + -2.625, + 0.291015625, + 1.2109375, + -2.5625, + 1.4921875, + -0.80078125, + 0.12451171875, + -0.1474609375, + -0.14453125, + 1.0859375, + 0.0556640625, + 0.494140625, + 1.34375, + 1.859375, + 0.033203125, + -0.69140625, + 0.734375, + -3.625, + -0.73046875, + 0.244140625, + 1.7890625, + -1.1015625, + -0.9453125, + 3.015625, + 0.1689453125, + -1.609375, + 0.9921875, + 1.5625, + -0.427734375, + -1.140625, + 1.515625, + 0.93359375, + 0.125, + -0.87109375, + -2.015625, + 0.8671875, + 0.1630859375, + -1.8984375, + -0.95703125, + -0.55859375, + -0.6796875, + -3.171875, + 3.1875, + 1.546875, + -2.390625, + 1.34375, + -1.0546875, + 1.140625, + -2.0, + -2.234375, + -0.91796875, + -0.365234375, + -6.96875, + -0.5703125, + 1.5390625, + 0.671875, + -0.314453125, + -2.1875, + -0.87890625, + -3.0, + -0.921875, + -2.140625, + -1.1484375, + -3.359375, + -0.87890625, + 1.5546875, + -2.546875, + -1.078125, + 1.5234375, + 1.1953125, + 3.8125, + 1.140625, + -0.34375, + 1.5, + 1.8984375, + -0.203125, + -2.125, + 0.6328125, + 1.3671875, + 0.318359375, + -0.482421875, + 0.546875, + 0.73046875, + 0.515625, + 
0.44140625, + 0.484375, + -1.234375, + 1.4140625, + -1.4921875, + -2.296875, + 0.76171875, + 1.59375, + -0.2236328125, + 1.59375, + -2.21875, + 0.455078125, + 2.1875, + -0.75, + -0.95703125, + 2.640625, + 2.390625, + 2.53125, + -0.419921875, + 2.15625, + 1.3359375, + -3.90625, + -0.294921875, + -2.46875, + 0.640625, + 1.296875, + -1.359375, + -0.8359375, + 0.02294921875, + -2.375, + -1.375, + -0.98828125, + 2.671875, + -4.875, + 4.75, + 7.28125, + -1.09375, + 1.703125, + 4.0625, + -0.625, + 1.9375, + 1.7734375, + 1.1015625, + 0.3203125, + 0.90625, + -4.0625, + 1.6171875, + -1.875, + -0.78125, + -0.166015625, + -1.4453125, + -0.341796875, + 0.9765625, + -0.291015625, + 0.470703125, + -0.040283203125, + 0.80859375, + 0.640625, + -1.7734375, + -0.32421875, + -2.4375, + -0.8515625, + 0.224609375, + 0.70703125, + 1.109375, + -0.96484375, + 0.91796875, + -1.53125, + 2.03125, + 0.359375, + -1.1875, + 2.40625, + -0.8359375, + -2.9375, + 0.96875, + -1.203125, + 1.3203125, + 0.53125, + -0.96484375, + -2.953125, + 4.9375, + 0.322265625, + -1.578125, + -2.390625, + 0.71484375, + -1.09375, + -2.234375, + -1.2890625, + -1.6875, + 5.0, + -0.5859375, + -0.93359375, + 2.21875, + 1.28125, + 1.4765625, + -0.031982421875, + 1.4375, + 3.359375, + -0.6015625, + 0.06787109375, + 0.62109375, + 0.828125, + -0.23046875, + 0.828125, + -0.75390625, + 1.0234375, + -2.046875, + 0.60546875, + -0.43359375, + 0.7109375, + -2.484375, + -2.046875, + 1.1328125, + 0.8203125, + -1.8359375, + -4.90625, + -2.25, + -0.71484375, + 0.81640625, + 1.421875, + -1.2421875, + 1.3828125, + -1.03125, + -7.1875, + 0.5390625, + -1.2890625, + -4.15625, + -0.287109375, + 6.09375, + -1.203125, + 1.015625, + -0.65625, + -2.765625, + 0.48046875, + 0.96484375, + 4.125, + 1.265625, + 0.224609375, + 0.6484375, + -1.3671875, + -0.78515625, + -0.0038299560546875, + -4.0, + -0.78125, + -0.51171875, + -3.4375, + -1.765625, + 1.71875, + -1.890625, + -2.40625, + -4.84375, + -3.0625, + 1.96875, + -0.6171875, + 0.8203125, + 
-2.53125, + 1.125, + -1.359375, + 3.859375, + 2.640625, + -1.5703125, + -1.1015625, + -1.7265625, + 0.490234375, + 0.70703125, + 0.3125, + 1.90625, + -1.1640625, + -1.4765625, + -1.1328125, + -1.7734375, + 4.75, + 0.48828125, + -1.453125, + 0.3125, + -0.77734375, + 3.484375, + 1.1171875, + 0.74609375, + -0.28125, + -0.8671875, + 2.03125, + -1.203125, + -3.3125, + 1.0078125, + 1.40625, + -4.25, + 1.8828125, + 0.40234375, + 0.259765625, + -0.7578125, + 1.8671875, + -1.671875, + 2.71875, + 1.3046875, + 4.21875, + -1.8125, + -1.9375, + 1.9375, + 0.76953125, + 1.375, + 2.578125, + 0.291015625, + -1.9609375, + 3.328125, + -2.703125, + -2.1875, + 0.28125, + 3.40625, + -1.828125, + 0.388671875, + -0.578125, + 5.46875, + 1.921875, + -0.98046875, + -2.625, + -0.91015625, + -0.99609375, + -0.72265625, + 0.09375, + 0.953125, + 0.62109375, + -0.26953125, + 1.421875, + 0.451171875, + -2.890625, + -0.59765625, + 2.875, + 3.40625, + 0.82421875, + 0.64453125, + -0.7734375, + 7.46875, + -6.78125, + 1.3125, + -3.84375, + -2.203125, + 0.404296875, + 1.015625, + -1.7890625, + 2.84375, + 1.65625, + 3.578125, + 0.81640625, + -0.3671875, + -2.796875, + 3.515625, + 2.4375, + 1.9375, + -4.65625, + 0.9921875, + -3.734375, + 1.15625, + -2.734375, + -1.1484375, + 0.80078125, + 1.8203125, + -0.01348876953125, + 1.96875, + -0.8359375, + -0.4296875, + -0.9765625, + 1.84375, + 0.328125, + 0.2216796875, + 2.703125, + 0.044921875, + -2.625, + 0.75, + -1.3125, + 1.8515625, + 1.6328125, + 0.98046875, + -0.83203125, + -2.21875, + -0.045166015625, + 0.65625, + -0.86328125, + 1.109375, + -1.53125, + 1.6015625, + -0.455078125, + 1.3125, + 1.15625, + -0.44140625, + -1.890625, + 0.10009765625, + -2.03125, + 2.1875, + -1.328125, + 0.58984375, + 4.625, + -0.71875, + 0.197265625, + 2.1875, + 0.82421875, + -1.4375, + 3.0, + -0.076171875, + 2.859375, + -1.3359375, + 0.85546875, + -0.81640625, + 2.234375, + 1.53125, + 0.087890625, + -0.515625, + 2.84375, + -0.92578125, + -3.359375, + -2.9375, + -1.5234375, + 
2.265625, + -0.341796875, + 0.8828125, + 2.515625, + -1.3515625, + 0.65234375, + -3.5625, + 2.0, + -1.8359375, + -1.078125, + 2.359375, + 1.2109375, + -0.119140625, + 1.1484375, + -2.078125, + 3.0, + -2.234375, + 1.3046875, + 0.59765625, + -0.1611328125, + -1.15625, + 0.75, + -0.31640625, + -0.20703125, + 0.9921875, + -1.09375, + 0.8671875, + -2.375, + -2.640625, + -0.74609375, + -2.171875, + 1.7734375, + 10.625, + 1.1171875, + -3.78125, + 0.035400390625, + -0.6015625, + -0.546875, + -3.671875, + -6.90625, + -0.55859375, + -2.5, + -0.94140625, + -1.984375, + 0.87109375, + -0.408203125, + -2.3125, + 0.84765625, + 1.328125, + -0.90625, + 0.2001953125, + 1.0625, + -2.109375, + 0.11328125, + 0.9609375, + -0.212890625, + -0.05810546875, + -1.484375, + 0.62890625, + 1.6796875, + -0.322265625, + 1.40625, + -0.486328125, + 1.796875, + 0.0224609375, + 0.90234375, + -0.625, + -2.453125, + -0.75, + -0.54296875, + 6.0, + 1.9609375, + 1.03125, + -0.734375, + -0.96875, + 3.140625, + -2.53125, + 0.69140625, + 2.015625, + -1.140625, + -1.0390625, + 2.265625, + -1.953125, + 0.60546875, + -4.03125, + -1.3046875, + 2.171875, + -1.078125, + 1.1953125, + 1.3125, + -0.392578125, + 0.53515625, + 0.51171875, + 2.328125, + 1.4609375, + -0.78125, + -0.443359375, + 1.9453125, + 1.71875, + 0.07421875, + 3.609375, + 0.412109375, + -1.5, + -0.361328125, + -0.85546875, + 3.1875, + 2.4375, + 0.171875, + 0.90625, + 0.2080078125, + -1.203125, + -0.40625, + -2.578125, + 2.96875, + 1.9921875, + 0.251953125, + 1.359375, + 2.453125, + 0.81640625, + -0.62109375, + -2.59375, + -0.003997802734375, + 0.6953125, + 0.70703125, + 2.046875, + 0.99609375, + -3.984375, + 1.0625, + -0.8125, + 3.671875, + 3.734375, + -1.46875, + -1.71875, + 0.5703125, + 2.859375, + 1.28125, + -0.640625, + -2.921875, + -3.296875, + 1.1328125, + -1.046875, + 0.37109375, + -0.84375, + 1.046875, + 0.30859375, + 2.078125, + 0.234375, + -0.12255859375, + -0.92578125, + 2.140625, + -0.431640625, + 0.21875, + -0.373046875, + 1.84375, + 
-3.234375, + 0.1904296875, + 1.140625, + 2.6875, + -1.5234375, + -3.21875, + 0.09375, + -2.34375, + -1.4140625, + 1.1015625, + 4.4375, + 0.2177734375, + -1.6015625, + 1.703125, + 2.171875, + -0.8828125, + -1.6640625, + -0.1923828125, + 1.3828125, + 0.80078125, + 2.671875, + -1.640625, + -2.0625, + 1.8359375, + 1.984375, + 0.458984375, + -1.578125, + 0.13671875, + -5.84375, + 3.171875, + 2.40625, + -1.78125, + -1.6015625, + -1.296875, + -1.6015625, + 0.72265625, + -0.08837890625, + -0.27734375, + 0.330078125, + 0.1240234375, + -4.15625, + 1.234375, + -0.392578125, + 1.609375, + -0.5078125, + 1.859375, + -1.2109375, + -3.5625, + 3.484375, + -0.47265625, + 3.65625, + -0.828125, + 0.2451171875, + 1.046875, + 1.5, + -2.328125, + -0.016357421875, + -0.419921875, + -0.404296875, + 1.40625, + 1.1171875, + 1.375, + 0.212890625, + 1.4453125, + -2.765625, + -0.11962890625, + 0.1796875, + -3.8125, + -2.140625, + 2.328125, + 1.7578125, + 0.7265625, + -0.6875, + 3.53125, + 2.234375, + 1.109375, + -1.171875, + 1.359375, + 1.109375, + -1.03125, + -5.8125, + 0.94921875, + -1.328125, + -1.40625, + -1.265625, + -0.625, + 3.5625, + 1.90625, + -2.96875, + -0.341796875, + 5.0625, + 0.53515625, + 2.359375, + -1.5546875, + 2.21875, + -0.7109375, + 0.486328125, + 1.8046875, + -2.03125, + -1.375, + -1.3125, + -2.03125, + -0.26953125, + -0.31640625, + 1.7421875, + -2.21875, + -3.71875, + -1.1953125, + 3.328125, + 0.71484375, + 0.1943359375, + 1.4765625, + -1.8671875, + 2.921875, + 0.69921875, + -1.2734375, + -0.0252685546875, + 2.703125, + 0.1943359375, + -0.00830078125, + -0.1787109375, + -1.4609375, + 1.28125, + -0.376953125, + -0.462890625, + -0.251953125, + 2.9375, + -2.953125, + 2.875, + -1.40625, + -1.234375, + 0.72265625, + -1.5546875, + -1.2109375, + -1.140625, + -2.34375, + -1.1171875, + -4.28125, + -0.953125, + -0.6875, + 7.125, + 0.4140625, + 0.765625, + 0.052734375, + -0.7421875, + 2.65625, + 1.5078125, + 0.2373046875, + -1.0625, + 0.6484375, + 3.234375, + -2.90625, + 3.5625, + 
4.53125, + -1.625, + 1.84375, + 0.6015625, + 0.84375, + -0.166015625, + -0.76953125, + 4.875, + 2.234375, + -0.69140625, + -2.234375, + 0.58203125, + 0.7421875, + -1.3828125, + 0.76953125, + -1.140625, + -2.03125, + 3.75, + -2.765625, + -2.28125, + -2.75, + 0.890625, + -2.9375, + 2.65625, + 0.953125, + 1.8125, + -1.2890625, + 0.578125, + -0.0751953125, + -1.7890625, + -0.216796875, + 1.734375, + 1.6484375, + -1.515625, + -0.55078125, + -1.375, + 2.0625, + 0.78125, + -4.5, + -1.34375, + -0.29296875, + -0.67578125, + -7.0, + 1.375, + -1.953125, + 0.2265625, + -1.109375, + 1.890625, + -0.828125, + 7.28125, + 1.6953125, + 0.5390625, + 2.96875, + -2.921875, + 1.65625, + -0.0712890625, + -1.7734375, + -2.59375, + 1.7265625, + 2.703125, + 0.71875, + 0.76171875, + -3.796875, + -1.6640625, + -0.61328125, + -4.875, + -0.9453125, + 0.39453125, + 3.28125, + 4.8125, + -5.5625, + -1.421875, + 1.7421875, + -2.3125, + 0.76953125, + -1.6328125, + -1.3671875, + -3.421875, + 0.90625, + 2.6875, + -2.859375, + -3.6875, + -0.375, + 2.859375, + -1.7578125, + -0.255859375, + -2.203125, + -0.220703125, + -0.09521484375, + -2.203125, + -1.59375, + -0.220703125, + -0.310546875, + -1.8671875, + -3.359375, + -1.296875, + -0.1005859375, + 2.390625, + 0.171875, + 1.2734375, + -2.203125, + -0.93359375, + -3.25, + -1.640625, + 0.3203125, + 1.7109375, + -2.8125, + 0.37109375, + 1.1328125, + -5.78125, + -1.171875, + -1.453125, + -0.53515625, + -2.71875, + 2.796875, + 2.8125, + -0.15625, + 2.234375, + -0.87890625, + -5.875, + -0.0002460479736328125, + -1.8984375, + 4.25, + -0.9140625, + 1.5546875, + 1.890625, + -1.140625, + 0.921875, + 2.125, + 0.578125, + 3.03125, + -0.06298828125, + -1.4140625, + 0.1953125, + 4.46875, + 2.71875, + 1.9296875, + -0.83203125, + -2.6875, + 2.71875, + -1.3359375, + -2.859375, + -0.037109375, + -1.875, + 1.3984375, + 1.0078125, + 0.06396484375, + -2.359375, + -1.78125, + -0.451171875, + -0.5234375, + 2.078125, + 0.1630859375, + -0.1025390625, + -0.97265625, + 3.234375, + 
1.9765625, + -1.0390625, + 1.625, + -0.408203125, + -1.046875, + -3.25, + 1.15625, + -1.3125, + -2.46875, + 3.140625, + -0.212890625, + 0.640625, + 1.796875, + -0.09765625, + 1.2421875, + 16.125, + -1.796875, + 0.11962890625, + 1.171875, + 1.5859375, + -1.6328125, + -0.10205078125, + -2.828125, + -1.796875, + -0.6796875, + 3.203125, + 2.671875, + 2.25, + 2.21875, + -0.609375, + -2.234375, + -0.8984375, + 1.5234375, + 0.1650390625, + -0.30078125, + 0.97265625, + 4.96875, + -0.515625, + -2.765625, + 0.4921875, + -0.953125, + -0.515625, + -1.5390625, + 1.2109375, + -0.3671875, + 2.390625, + -1.7734375, + -2.21875, + 2.453125, + 0.2001953125, + 3.0, + 0.79296875, + 3.90625, + 2.453125, + 4.8125, + -2.546875, + -0.0908203125, + 1.3203125, + -0.7109375, + -2.09375, + 0.69140625, + -0.8125, + -1.9375, + 0.546875, + -0.39453125, + 2.359375, + -0.1796875, + 2.25, + 2.5625, + -2.75, + -0.45703125, + 3.203125, + 5.75, + 0.42578125, + -0.71875, + -0.2138671875, + 0.66796875, + -0.48046875, + 0.96484375, + -0.337890625, + 2.4375, + -1.25, + -0.59765625, + 2.3125, + 2.609375, + -1.9609375, + -0.91796875, + -1.40625, + -0.50390625, + 0.2353515625, + -1.765625, + -2.5625, + 0.146484375, + -0.8359375, + -3.125, + -1.234375, + -0.38671875, + -0.1689453125, + -0.1533203125, + 1.734375, + 1.421875, + 1.2890625, + 0.2333984375, + 0.302734375, + 0.314453125, + 1.1484375, + 1.140625, + -0.890625, + -0.50390625, + 1.96875, + -0.46875, + -1.75, + -1.125, + -2.140625, + -2.578125, + -5.3125, + 1.5859375, + 0.3046875, + 2.15625, + 0.87890625, + -0.52734375, + -0.26171875, + -5.28125, + 0.19921875, + 0.392578125, + 0.56640625, + 1.59375, + -0.7890625, + 0.8203125, + 1.1171875, + -1.828125, + -1.5, + -0.0084228515625, + 0.53515625, + -2.125, + 0.9296875, + 3.34375, + -0.76953125, + 1.171875, + -2.5, + -2.28125, + -2.984375, + -0.498046875, + 3.328125, + 0.58203125, + 0.4375, + -1.25, + 2.21875, + -1.8828125, + -0.400390625, + 0.28515625, + 2.109375, + 3.390625, + 1.9296875, + 0.84375, + 
-1.6484375, + 1.203125, + -3.671875, + 2.25, + -1.578125, + 1.703125, + -0.302734375, + 1.625, + 1.0859375, + -2.375, + -0.56640625, + 2.0625, + 1.75, + 1.921875, + -1.6328125, + 1.6640625, + -2.125, + -2.671875, + 1.296875, + 0.9140625, + -1.390625, + 0.84765625, + -0.1884765625, + -0.62890625, + 1.53125, + -0.57421875, + -1.3203125, + -0.73828125, + 2.375, + 1.65625, + -0.86328125, + 1.859375, + -1.9140625, + 1.1953125, + -1.0859375, + -0.4765625, + 1.578125, + 0.23046875, + 1.7734375, + -2.046875, + 1.3359375, + -0.51953125, + 0.375, + 1.0625, + 0.31640625, + 1.796875, + -0.462890625, + -0.94140625, + 4.25, + 2.28125, + 1.8828125, + -1.5078125, + -0.58203125, + 0.482421875, + 0.1337890625, + -0.1298828125, + 2.953125, + 0.1376953125, + 0.1845703125, + 1.25, + -0.484375, + 0.0283203125, + -1.21875, + 3.796875, + 2.609375, + -2.625, + -0.1484375, + 3.34375, + -2.484375, + 3.328125, + -2.046875, + -0.734375, + -2.125, + 2.078125, + 0.76171875, + 0.98828125, + 1.9453125, + -1.6875, + -0.65625, + -1.625, + 0.55078125, + -1.5234375, + 1.90625, + 1.3359375, + -0.2216796875, + 0.1396484375, + 3.046875, + 2.796875, + -3.171875, + -1.640625, + 3.453125, + 1.1640625, + -1.15625, + 1.0859375, + -0.59375, + -3.046875, + -1.0859375, + 4.21875, + -0.08447265625, + -1.171875, + -2.4375, + 1.625, + 1.5234375, + -0.921875, + 0.3515625, + 1.390625, + 1.1953125, + -0.455078125, + 0.28125, + -1.03125, + -1.90625, + 0.734375, + 0.19921875, + -0.5234375, + -3.046875, + 2.25, + -1.7578125, + -0.53125, + -2.96875, + -0.0361328125, + 2.03125, + 0.8984375, + 2.625, + -3.078125, + -0.703125, + 1.296875, + 0.97265625, + 0.3515625, + -4.65625, + 4.125, + -2.046875, + -0.08642578125, + -1.6875, + 0.8203125, + 0.59375, + 3.53125, + 0.8203125, + -1.0078125, + 0.74609375, + 4.53125, + -1.7265625, + 3.75, + -2.09375, + 0.91015625, + 0.3203125, + -0.5, + -0.45703125, + 0.16796875, + 2.296875, + -0.6875, + -1.2734375, + 0.75, + -1.78125, + 1.53125, + 0.47265625, + 0.5078125, + 1.8125, + -4.1875, + 
-2.8125, + -0.2578125, + 0.73046875, + -0.447265625, + -0.453125, + -2.28125, + -3.9375, + -1.921875, + 2.203125, + 0.5546875, + -0.455078125, + -1.703125, + -2.53125, + -0.41015625, + -1.5625, + -16.25, + 1.171875, + 1.546875, + -1.734375, + 3.65625, + -0.412109375, + 1.53125, + 1.328125, + -0.1826171875, + 0.8359375, + 0.921875, + 0.126953125, + 0.130859375, + -1.625, + 4.3125, + -0.25390625, + 0.73828125, + -1.859375, + 1.671875, + 2.609375, + 0.65625, + -0.26953125, + -0.46875, + 0.69140625, + -0.1796875, + -4.21875, + 0.31640625, + 0.12890625, + -3.046875, + -0.81640625, + -0.984375, + 2.09375, + 0.37890625, + 1.953125, + -3.703125, + -1.2734375, + 1.4609375, + -1.3671875, + -1.2578125, + -0.33203125, + -0.60546875, + 0.78125, + -0.033203125, + -0.318359375, + 1.0546875, + -1.0546875, + -3.75, + -0.75390625, + -0.15234375, + 0.458984375, + 1.0625, + 2.296875, + 3.171875, + -3.3125, + -1.203125, + -0.5703125, + 1.0078125, + 3.796875, + -1.09375, + -1.1875, + 1.4296875, + 4.6875, + -1.125, + -0.04833984375, + -0.5625, + -2.015625, + -1.0625, + 2.1875, + 0.11328125, + 3.953125, + -4.15625, + 1.2578125, + 2.703125, + -2.28125, + 0.00150299072265625, + 0.3984375, + -2.765625, + -2.015625, + -2.71875, + 0.1650390625, + 1.71875, + -0.92578125, + -6.1875, + -0.81640625, + 0.66796875, + -0.828125, + -0.75390625, + 1.8828125, + -0.5, + 1.5703125, + -1.3671875, + 1.46875, + -0.59765625, + -2.015625, + -1.65625, + -1.1328125, + 2.9375, + 0.6875, + 0.3515625, + 0.024169921875, + -0.1640625, + -0.0849609375, + -2.5625, + 0.0771484375, + 1.203125, + -1.1015625, + -0.8125, + -1.53125, + -2.109375, + 5.53125, + -0.49609375, + -2.21875, + 0.2373046875, + 4.03125, + -0.419921875, + 1.125, + -0.1083984375, + 0.28125, + 1.1484375, + 1.671875, + 0.828125, + 0.478515625, + 0.94140625, + 0.5390625, + 0.67578125, + 2.125, + 0.2255859375, + -0.064453125, + -1.765625, + -0.92578125, + 0.435546875, + -0.1162109375, + -1.15625, + 2.859375, + -0.5546875, + -2.328125, + 3.078125, + 
-1.7890625, + 0.6796875, + -2.65625, + 3.65625, + -0.81640625, + -0.74609375, + -0.8515625, + 0.061767578125, + 0.2041015625, + -1.296875, + 3.96875, + 1.9296875, + -2.125, + 0.8828125, + 4.53125, + 1.046875, + -2.015625, + 0.27734375, + 1.2734375, + -1.65625, + 0.53125, + 2.734375, + 1.328125, + -0.75390625, + -0.87890625, + -2.734375, + -0.6171875, + 1.3203125, + 2.234375, + 6.5, + 1.6015625, + 2.671875, + -0.47265625, + 1.078125, + 0.5703125, + 0.1982421875, + -2.34375, + 0.94140625, + -0.02783203125, + -3.265625, + 0.6015625, + 1.203125, + 3.140625, + 2.421875, + -1.4765625, + -0.80078125, + -1.0859375, + 1.9765625, + 7.78125, + 0.3125, + 0.3828125, + 3.0625, + 2.15625, + 0.326171875, + -1.4921875, + 1.4453125, + 2.5625, + 1.1328125, + -3.484375, + -0.4765625, + -0.22265625, + 1.6328125, + -1.5859375, + -2.25, + 1.4296875, + 0.0201416015625, + 0.439453125, + -0.173828125, + -0.458984375, + 0.470703125, + 0.1650390625, + -0.1025390625, + -0.703125, + 1.1328125, + 1.640625, + 1.8046875, + 3.515625, + 1.8359375, + 0.26953125, + 3.28125, + -3.328125, + -0.064453125, + 1.6484375, + 0.12890625, + 1.5, + 0.40625, + -0.59375, + -1.734375, + 2.21875, + 1.6328125, + -1.46875, + 1.078125, + 4.53125, + 1.1484375, + 0.0986328125, + 3.078125, + -1.9140625, + -0.177734375, + 0.6328125, + 0.640625, + 1.0, + -1.1328125, + 3.84375, + 1.203125, + -1.296875, + 1.0703125, + 2.203125, + -3.5625, + -1.765625, + 0.94921875, + -3.078125, + -1.21875, + -3.6875, + 1.6328125, + 1.984375, + 0.328125, + 1.09375, + 2.03125, + -4.25, + -0.93359375, + 2.53125, + -0.47265625, + 1.5703125, + 1.9921875, + 0.361328125, + -0.275390625, + -2.453125, + 0.51171875, + -0.3359375, + 2.265625, + -1.8125, + -1.2890625, + -0.5234375, + -1.0546875, + 0.81640625, + 3.0, + -3.515625, + 3.015625, + -2.796875, + 0.34765625, + -1.5390625, + 0.33984375, + 2.484375, + 1.640625, + -1.7421875, + 0.6796875, + -0.875, + -0.054931640625, + 1.109375, + -0.9296875, + -2.296875, + -6.71875, + 1.28125, + 1.6015625, + 
0.4296875, + 1.53125, + 0.1845703125, + 2.140625, + -2.65625, + -0.7109375, + 1.8828125, + -1.65625, + -0.216796875, + 0.263671875, + -0.40625, + 2.65625, + -0.671875, + -2.671875, + -2.34375, + 0.296875, + -4.4375, + -3.703125, + -1.3984375, + 1.578125, + -2.8125, + -1.046875, + 0.31640625, + 1.3203125, + 0.482421875, + -0.69140625, + -1.0390625, + -0.8984375, + -3.484375, + 2.953125, + -1.546875, + -0.10693359375, + -6.59375, + -1.953125, + 0.87109375, + -0.455078125, + 1.953125, + -0.91796875, + 2.40625, + -1.65625, + 0.578125, + -0.96875, + -1.265625, + -1.5234375, + -3.765625, + -1.7578125, + -1.9296875, + 0.357421875, + 0.69140625, + -1.265625, + -3.46875, + 2.015625, + 1.1640625, + -1.2890625, + 0.12451171875, + -2.25, + -4.21875, + 2.875, + -2.46875, + 0.1865234375, + -1.046875, + 0.6875, + -1.2734375, + -2.140625, + 2.59375, + 1.8125, + 1.421875, + -1.90625, + 0.96875, + 1.828125, + 0.39453125, + -0.3359375, + -1.453125, + 5.78125, + 1.609375, + -0.44921875, + 4.15625, + 3.0625, + 0.5625, + -1.0859375, + 0.56640625, + -0.828125, + -1.8828125, + 8.5, + -1.640625, + 0.59375, + -4.21875, + -0.53515625, + 0.87109375, + -1.359375, + 4.84375, + 0.625, + 0.0537109375, + 1.4140625, + -1.0859375, + 2.71875, + -1.171875, + 2.15625, + -1.1796875, + 0.50390625, + -3.921875, + 3.421875, + 1.09375, + 0.2412109375, + 1.3359375, + 2.734375, + 3.015625, + 0.08447265625, + 0.427734375, + 0.76171875, + -1.7265625, + -1.890625, + -0.08251953125, + 0.28125, + 1.6796875, + -0.8359375, + -1.4609375, + -1.5078125, + -0.5703125, + 4.40625, + -3.671875, + 0.6796875, + 2.46875, + 1.7578125, + -2.375, + 0.5234375, + -1.15625, + 1.8046875, + -2.421875, + 0.9765625, + 1.4375, + -0.12255859375, + -0.0040283203125, + -0.7265625, + -1.4140625, + 0.00811767578125, + -0.4296875, + 0.412109375, + -0.1162109375, + -1.7890625, + 1.65625, + -2.96875, + -1.40625, + 0.1357421875, + 0.2451171875, + -2.5625, + -0.9296875, + 0.17578125, + 3.09375, + 1.6171875, + -1.765625, + 3.40625, + 3.578125, + 
2.609375, + 0.859375, + -1.5390625, + 0.91796875, + -2.359375, + 2.171875, + -5.75, + 0.0201416015625, + 1.7890625, + 1.859375, + -1.6796875, + 2.375, + 0.5546875, + -2.6875, + -0.06591796875, + 3.171875, + -1.875, + -1.2734375, + 1.453125, + 0.46875, + -0.234375, + -1.65625, + 1.6796875, + 0.73046875, + 0.5234375, + 1.1953125, + 3.796875, + -0.61328125, + 0.040283203125, + -1.515625, + -1.5390625, + -2.640625, + 2.0625, + 6.1875, + -0.3359375, + 1.6328125, + -1.1015625, + 0.6328125, + 0.296875, + -0.279296875, + 1.2109375, + 0.48828125, + 0.341796875, + -1.765625, + -2.3125, + 2.3125, + 1.296875, + 0.5, + -0.0517578125, + 3.1875, + -0.326171875, + 0.57421875, + -2.328125, + 0.0859375, + -0.93359375, + -2.1875, + 1.0703125, + 1.6484375, + 0.474609375, + -1.8125, + 4.09375, + -0.79296875, + -0.65234375, + -3.96875, + -1.0, + 2.78125, + 0.9609375, + 5.8125, + 0.48046875, + -2.4375, + -0.0240478515625, + -1.96875, + -1.1484375, + -0.166015625, + 0.578125, + 1.1015625, + -0.95703125, + 2.9375, + 1.71875, + 0.70703125, + -1.59375, + 0.3046875, + -4.875, + 1.5, + 1.28125, + -2.0625, + 0.345703125, + -1.25, + 0.2294921875, + 0.69140625, + 0.6640625, + -0.7734375, + -0.6875, + 0.306640625, + 1.75, + 0.23828125, + 1.8671875, + 1.1953125, + -2.4375, + 4.1875, + 2.21875, + -1.75, + -1.09375, + 4.28125, + -0.5390625, + -1.921875, + 0.11328125, + 3.015625, + 0.08642578125, + 2.21875, + 2.234375, + 1.3359375, + -0.8359375, + -0.67578125, + 2.078125, + 3.46875, + -3.3125, + -0.005828857421875, + -0.71484375, + -0.3203125, + -1.734375, + -2.5, + 2.5, + -5.5625, + 0.01611328125, + 0.66015625, + 0.6796875, + 0.98828125, + -1.5, + -0.81640625, + -4.3125, + -0.26953125, + -2.375, + 1.90625, + 0.9140625, + 2.859375, + -0.68359375, + 0.130859375, + -2.671875, + 1.15625, + -1.6484375, + -0.77734375, + -3.71875, + -0.58203125, + -1.21875, + 0.2236328125, + -3.25, + -0.30078125, + 2.359375, + 0.306640625, + -2.21875, + 0.263671875, + -0.228515625, + -2.4375, + 1.7421875, + -0.61328125, + 
0.453125, + 5.03125, + 0.1396484375, + 1.15625, + 0.26171875, + -0.455078125, + -2.796875, + 0.6640625, + 1.375, + -0.044189453125, + 0.7421875, + -0.65234375, + -0.421875, + -0.0155029296875, + 0.8828125, + 0.283203125, + -1.1484375, + 1.0, + -0.32421875, + -0.8515625, + 0.546875, + 1.3125, + -0.423828125, + 3.46875, + 0.765625, + -1.6953125, + 1.265625, + -1.109375, + 1.1875, + -0.275390625, + 0.69921875, + -2.234375, + 2.046875, + 2.90625, + 0.5390625, + -0.5703125, + -0.51953125, + 0.1552734375, + -0.53515625, + 3.6875, + 0.484375, + 3.5625, + -0.66796875, + -0.11083984375, + 0.73046875, + -0.04833984375, + -1.3359375, + -0.671875, + 0.61328125, + 1.0078125, + 0.337890625, + -0.74609375, + -0.703125, + -0.1650390625, + -1.9296875, + -0.94921875, + 0.5625, + 0.90625, + -2.96875, + 1.6640625, + 4.28125, + -0.1982421875, + 1.484375, + 1.6484375, + -0.8359375, + 1.546875, + 0.84375, + 2.109375, + 2.046875, + 3.90625, + -0.96875, + -1.5, + 1.4375, + 0.76953125, + 2.75, + -2.40625, + -1.5546875, + -2.3125, + -2.25, + -2.0625, + 2.578125, + 0.248046875, + -0.03271484375, + 1.4375, + -0.60546875, + 1.7421875, + -2.234375, + 3.203125, + 0.09521484375, + -0.6953125, + 0.8828125, + 1.125, + -2.453125, + 0.9921875, + 0.11328125, + 0.79296875, + 2.328125, + 0.8515625, + 1.84375, + 0.81640625, + -2.484375, + 1.5859375, + 2.875, + -1.6953125, + -1.921875, + -2.375, + 0.828125, + 1.890625, + 0.38671875, + -2.5, + 6.40625, + -1.046875, + -2.796875, + -0.396484375, + -0.53515625, + 0.2890625, + -1.390625, + 6.46875, + -1.6875, + 0.53125, + 3.09375, + -0.294921875, + 1.140625, + 0.38671875, + -2.328125, + -1.1015625, + 3.15625, + 0.283203125, + 1.40625, + 0.5078125, + 1.125, + 0.52734375, + 0.158203125, + 0.6875, + -0.99609375, + 1.3203125, + -6.84375, + 1.3984375, + -1.140625, + 0.91015625, + -0.466796875, + 1.671875, + 0.427734375, + -1.6640625, + -0.54296875, + 3.5625, + 1.7578125, + 0.88671875, + -2.515625, + 3.0, + 1.6015625, + -1.0703125, + -0.93359375, + 2.5, + 
-0.83203125, + 1.15625, + -1.9453125, + 0.39453125, + 0.2734375, + -3.671875, + -2.015625, + 3.4375, + 0.2021484375, + -0.70703125, + 2.03125, + -0.130859375, + 0.1796875, + -4.625, + -1.796875, + 1.671875, + 1.1796875, + -7.4375, + 0.87109375, + 3.421875, + 0.21875, + 2.78125, + -0.5390625, + -5.125, + 0.2421875, + -1.5859375, + -3.84375, + 1.1015625, + 0.78125, + 0.0, + 2.265625, + 2.25, + -2.1875, + -1.7734375, + 2.65625, + 1.4453125, + 0.17578125, + -3.453125, + -2.859375, + 1.359375, + -1.3125, + -0.341796875, + 3.265625, + -2.578125, + -1.2265625, + -1.0390625, + -0.50390625, + -1.1640625, + 3.75, + 1.3671875, + 0.376953125, + 0.134765625, + 0.20703125, + -1.171875, + -5.75, + -1.2421875, + -0.6015625, + 0.9375, + 0.455078125, + 0.8671875, + 6.875, + -0.3671875, + -1.1328125, + 0.61328125, + 0.6484375, + -2.078125, + -0.453125, + -0.890625, + 0.2490234375, + 2.125, + -1.390625, + 0.1455078125, + 4.0625, + 0.60546875, + 9.25, + -1.2421875, + -4.5625, + -0.6171875, + 0.55859375, + 0.06591796875, + 1.265625, + -4.78125, + -0.0081787109375, + 5.5, + -0.25, + 3.625, + -1.40625, + 0.9921875, + 0.953125, + -0.2314453125, + -1.734375, + 2.65625, + 0.388671875, + -3.25, + -0.52734375, + -1.859375, + -1.0, + -0.298828125, + 1.5, + 2.234375, + -1.5703125, + -1.7734375, + -0.51171875, + -2.109375, + 0.158203125, + 0.15234375, + 2.09375, + -0.2431640625, + -1.7734375, + 1.2421875, + 0.42578125, + 5.09375, + 3.140625, + -2.140625, + -1.9375, + -1.3359375, + 6.21875, + 0.46875, + 5.15625, + -1.1875, + 1.6328125, + 0.75, + 1.5, + 0.47265625, + 2.53125, + 2.34375, + 1.125, + -2.15625, + -0.267578125, + 3.046875, + 1.6015625, + -0.69921875, + -0.255859375, + -3.296875, + 0.326171875, + -0.0179443359375, + -3.9375, + 0.8828125, + -0.6171875, + 2.859375, + 0.2578125, + 1.046875, + -2.421875, + -0.52734375, + 1.078125, + 1.421875, + 1.1875, + -0.72265625, + -0.3515625, + 1.0703125, + 2.34375, + 0.89453125, + -0.91015625, + 1.4375, + 0.0634765625, + -1.6875, + 0.55078125, + 
1.6796875, + 2.375, + 1.109375, + 0.56640625, + -1.3125, + -0.6328125, + -3.09375, + 1.171875, + 1.125, + 0.4609375, + 2.59375, + 0.734375, + 1.109375, + 4.40625, + -0.9296875, + -1.015625, + 1.2578125, + -0.9453125, + -0.458984375, + -1.234375, + -2.484375, + 2.03125, + 1.3203125, + 1.2734375, + -0.69140625, + -1.1640625, + -1.8671875, + -1.4140625, + -0.6171875, + 1.578125, + -0.55859375, + 1.296875, + 1.1796875, + -1.3515625, + 3.640625, + -0.82421875, + -0.640625, + -1.734375, + -3.625, + -2.65625, + 1.8046875, + -4.46875, + -0.8359375, + 1.6953125, + 1.8984375, + -2.890625, + 2.296875, + 2.734375, + -1.3671875, + 1.265625, + -0.9765625, + -1.796875, + 3.078125, + -1.234375, + 2.125, + 1.640625, + -8.8125, + -1.9765625, + -0.5703125, + 0.73828125, + -3.5625, + 1.7734375, + -1.125, + -1.2265625, + -0.69140625, + -1.578125, + -4.375, + -0.0419921875, + 1.6796875, + 1.3125, + 1.453125, + 3.375, + 1.09375, + -1.671875, + -1.109375, + 0.3984375, + -0.3203125, + 0.380859375, + 0.007110595703125, + 2.3125, + 1.421875, + 1.0234375, + 0.478515625, + 3.640625, + -2.59375, + 0.458984375, + -2.796875, + 1.0703125, + -4.25, + -2.09375, + -2.5, + 1.578125, + -1.53125, + -2.046875, + 0.82421875, + 0.78125, + 0.36328125, + 1.4765625, + 0.0849609375, + -1.8203125, + -0.640625, + 1.7734375, + -0.9765625, + -1.1171875, + 0.71484375, + 2.953125, + -1.8984375, + -0.98828125, + 0.072265625, + -3.375, + 2.203125, + 4.53125, + -3.3125, + 2.171875, + 4.375, + 0.033203125, + 2.765625, + 0.2890625, + 1.4140625, + 1.1953125, + 2.71875, + 2.609375, + -1.1640625, + 3.859375, + 1.1484375, + 2.46875, + 0.51171875, + -3.828125, + -2.375, + -0.7890625, + 1.0703125, + 0.78515625, + 2.609375, + -0.984375, + -0.890625, + -3.625, + -4.84375, + 0.55078125, + -4.875, + -1.078125, + 0.109375, + 1.6640625, + -0.466796875, + -3.140625, + -0.169921875, + -1.1640625, + -0.875, + -1.21875, + 2.296875, + -3.34375, + 2.765625, + 0.032470703125, + 0.6328125, + 1.5546875, + -2.9375, + -1.4609375, + -0.9921875, 
+ 2.140625, + 0.318359375, + -0.70703125, + -0.546875, + -0.42578125, + -1.1796875, + -1.34375, + 2.40625, + 3.59375, + 0.5078125, + 0.58984375, + -1.15625, + 1.3359375, + -0.90234375, + -2.46875, + -2.3125, + -3.890625, + 3.890625, + -0.72265625, + -2.140625, + -3.125, + 0.138671875, + -1.5703125, + 1.609375, + -2.15625, + -1.3515625, + -2.203125, + -0.71484375, + 1.5390625, + 1.578125, + -2.03125, + -1.78125, + -1.7421875, + -3.109375, + -1.578125, + 1.5390625, + -2.375, + -0.91015625, + -0.10888671875, + 0.8125, + 0.2177734375, + -3.046875, + -2.03125, + -1.921875, + -0.7265625, + 1.7421875, + 0.373046875, + -1.109375, + 1.203125, + 0.890625, + 3.03125, + 1.890625, + -2.109375, + -1.2265625, + 0.8359375, + 1.28125, + 2.453125, + -1.1875, + 0.224609375, + 3.9375, + -1.953125, + 0.0031280517578125, + -1.3203125, + -0.53125, + -0.365234375, + 2.3125, + -0.2275390625, + 1.1875, + -0.7109375, + 3.4375, + -2.765625, + 0.33984375, + -1.265625, + -3.3125, + -0.04052734375, + 0.310546875, + 2.0, + -1.171875, + 3.171875, + -4.40625, + 2.640625, + -0.2080078125, + 1.0625, + 1.1953125, + 5.75, + -0.6015625, + 0.57421875, + 2.171875, + -0.50390625, + -0.61328125, + -0.828125, + 1.140625, + -1.75, + -2.015625, + -3.734375, + -0.13671875, + -1.0078125, + 1.3671875, + 4.34375, + -2.265625, + 0.01348876953125, + 2.0625, + -1.109375, + 0.55078125, + -2.65625, + -1.4453125, + -0.08935546875, + -0.205078125, + -0.8828125, + 2.015625, + -2.625, + -3.515625, + 0.73828125, + 0.6875, + 1.40625, + -1.328125, + -1.0546875, + -0.3203125, + 1.2890625, + -2.8125, + 0.373046875, + 0.984375, + 2.859375, + 0.75390625, + -1.640625, + -1.7578125, + 2.078125, + -0.52734375, + 1.3203125, + 0.0079345703125, + -3.296875, + -0.078125, + -1.0078125, + 2.6875, + -1.265625, + 2.515625, + 1.8828125, + -3.25, + -0.6015625, + -1.5859375, + -0.06298828125, + 1.4765625, + 0.56640625, + 1.8828125, + -0.05908203125, + -0.024169921875, + 3.71875, + 0.16796875, + -4.375, + -0.58984375, + -1.6796875, + -3.734375, 
+ -1.796875, + 0.71875, + 2.84375, + 0.453125, + -1.8203125, + -2.6875, + -0.267578125, + 1.421875, + -2.78125, + 0.1513671875, + 1.8359375, + 0.81640625, + 2.75, + -0.671875, + -1.1015625, + 0.671875, + -0.73828125, + -0.9765625, + 0.70703125, + -1.1875, + 0.08203125, + -5.21875, + -3.734375, + -0.57421875, + 0.828125, + 2.8125, + 1.5390625, + -1.9296875, + 0.318359375, + -0.75, + 2.921875, + 0.62109375, + 1.140625, + 0.232421875, + -1.296875, + 2.453125, + -2.46875, + 1.84375, + -1.2109375, + -2.09375, + -2.859375, + 0.90625, + -0.11279296875, + -0.546875, + -0.03271484375, + 1.0234375, + -0.51953125, + -0.51953125, + -11.0, + -0.2265625, + -1.953125, + 1.1640625, + 3.9375, + 2.375, + -1.140625, + -1.2109375, + -0.36328125, + -0.08837890625, + -1.1484375, + 0.65234375, + -0.392578125, + 2.859375, + 0.6875, + 1.046875, + 1.90625, + -0.051025390625, + 1.359375, + 0.38671875, + -0.53125, + 2.3125, + -2.03125, + 1.796875, + -0.56640625, + -1.8515625, + -6.0, + 2.390625, + -1.46875, + 1.3671875, + 0.9375, + -1.0625, + -3.796875, + 1.7421875, + -0.71484375, + -0.498046875, + -0.9765625, + -0.04931640625, + 0.921875, + 2.40625, + 1.0859375, + 2.65625, + -3.765625, + 1.0390625, + -1.015625, + -0.33203125, + 1.2265625, + -1.703125, + -3.359375, + -3.03125, + -3.453125, + 0.8515625, + 0.91796875, + 0.400390625, + 0.0301513671875, + 1.8828125, + 0.8671875, + 3.828125, + 0.1591796875, + 3.78125, + -2.453125, + 1.8125, + -0.34765625, + 3.59375, + -0.1220703125, + 1.34375, + -0.1787109375, + 2.484375, + -1.265625, + -1.5390625, + -0.228515625, + -5.34375, + 1.078125, + 2.3125, + -0.44921875, + -0.90625, + 0.09326171875, + -1.3828125, + -0.96875, + 2.546875, + 0.609375, + -1.5625, + 2.53125, + -0.478515625, + -0.6875, + -1.6875, + -0.953125, + 1.3515625, + 1.7578125, + 1.265625, + 0.7109375, + 1.734375, + 2.015625, + -0.5859375, + -1.390625, + 0.25, + -0.6796875, + 0.49609375, + 2.25, + 0.0947265625, + 1.09375, + -4.15625, + -0.431640625, + 0.765625, + -0.58203125, + 
-1.3515625, + -3.625, + -0.1123046875, + 3.046875, + -0.48046875, + -1.4375, + 0.94140625, + 3.578125, + -1.4609375, + 0.1279296875, + -1.234375, + -1.84375, + -0.0037384033203125, + -3.125, + -0.53125, + -0.1826171875, + -0.921875, + 1.65625, + -2.46875, + 2.390625, + -0.1728515625, + 3.25, + 0.5390625, + 1.203125, + -3.546875, + 1.953125, + -3.71875, + -0.1572265625, + 3.59375, + -0.043212890625, + 1.3671875, + 1.25, + 0.25390625, + 1.109375, + 1.640625, + -1.1015625, + 4.34375, + -2.828125, + 0.71484375, + 1.1484375, + -1.3828125, + 3.359375, + 0.48046875, + -0.921875, + -0.5859375, + 1.578125, + 0.490234375, + -0.4765625, + 1.8359375, + -1.1328125, + 3.765625, + -0.484375, + -0.37109375, + 1.4609375, + -0.392578125, + 0.80078125, + 1.9375, + 2.1875, + -1.46875, + 0.703125, + 1.65625, + 2.296875, + -0.7265625, + 1.5234375, + 1.078125, + -4.90625, + -2.546875, + 0.8828125, + -1.890625, + 0.09423828125, + -1.828125, + -1.7734375, + -2.421875, + -2.171875, + -1.2890625, + -0.9609375, + 3.0, + 1.65625, + 0.306640625, + -0.97265625, + 1.25, + 1.703125, + -0.9453125, + 0.59375, + 1.3046875, + -2.96875, + 0.283203125, + -1.1640625, + 0.1689453125, + -2.515625, + 2.15625, + -3.6875, + -0.443359375, + -0.98046875, + -1.65625, + -3.8125, + 0.427734375, + -1.90625, + 8.0625, + 1.1171875, + -0.1337890625, + -0.380859375, + -0.72265625, + 0.21484375, + -1.5, + -0.11279296875, + -1.953125, + -0.08935546875, + -0.578125, + 1.4453125, + -1.1171875, + 1.3984375, + 0.6640625, + 2.1875, + -2.328125, + -3.765625, + -5.34375, + -2.5, + 0.71875, + -0.00799560546875, + 2.046875, + 2.75, + -0.65625, + 0.484375, + -6.78125, + 0.51171875, + -0.95703125, + -1.4296875, + 0.328125, + 1.984375, + 0.04736328125, + 0.003997802734375, + 1.421875, + -0.330078125, + 1.453125, + -3.515625, + -2.65625, + 1.0234375, + 2.234375, + 0.1650390625, + 1.96875, + 0.1279296875, + 5.09375, + 0.2041015625, + -4.40625, + -1.71875, + -3.046875, + -1.0390625, + 1.9765625, + 2.03125, + 1.265625, + 0.140625, + 
0.404296875, + 2.359375, + -1.0390625, + 3.328125, + -0.7265625, + -2.46875, + -3.1875, + 0.2138671875, + -2.28125, + 0.08544921875, + 1.21875, + -0.6796875, + 0.423828125, + -3.109375, + -1.1484375, + -2.109375, + 1.4765625, + 1.859375, + -0.1376953125, + -0.91015625, + -3.484375, + -1.0546875, + 2.234375, + -0.640625, + -1.4296875, + -1.8125, + -1.4296875, + -1.09375, + 2.90625, + 1.6953125, + 0.1455078125, + -2.09375, + 0.26953125, + -2.890625, + 1.4296875, + 1.015625, + 1.6875, + 4.15625, + 2.296875, + 0.71875, + -1.15625, + -0.2275390625, + 1.328125, + -1.6640625, + -1.8203125, + -0.470703125, + -0.65625, + 3.53125, + 2.53125, + 1.359375, + -0.443359375, + -0.53125, + -1.140625, + -2.859375, + 3.4375, + -0.90234375, + 0.640625, + 0.0242919921875, + 3.203125, + -0.1611328125, + 1.515625, + 1.78125, + 3.859375, + 1.484375, + -0.70703125, + -3.125, + -1.6640625, + 0.314453125, + 3.359375, + -0.96875, + -0.0081787109375, + 0.9296875, + -2.140625, + 0.7734375, + 0.39453125, + 2.328125, + -0.85546875, + -2.609375, + 1.0703125, + 2.875, + -1.640625, + -1.5703125, + 2.46875, + -0.546875, + -1.5234375, + 2.5, + 2.6875, + -2.59375, + 1.9765625, + -3.34375, + 1.8984375, + 1.1640625, + -1.484375, + -0.451171875, + 0.2578125, + -1.0859375, + 0.0233154296875, + -0.142578125, + 0.0079345703125, + 0.9921875, + -5.625, + 1.015625, + -1.96875, + -0.01318359375, + 0.181640625, + -0.119140625, + -0.6484375, + 1.3359375, + 0.48046875, + -0.047119140625, + 0.498046875, + 0.5625, + 0.9296875, + 1.8046875, + -1.375, + -0.390625, + 0.703125, + 0.703125, + 0.1904296875, + -2.109375, + 2.71875, + 1.7421875, + -1.046875, + -1.46875, + 0.0206298828125, + 1.71875, + 2.171875, + -0.6953125, + -2.625, + -1.5390625, + -1.078125, + -1.15625, + 0.7890625, + -1.2578125, + -0.059326171875, + 0.032470703125, + 0.71484375, + -1.96875, + -0.6953125, + 0.57421875, + 1.203125, + -0.26171875, + 1.859375, + 0.380859375, + -3.890625, + 1.578125, + 0.65234375, + 3.1875, + -1.78125, + 1.078125, + 
-0.51953125, + -4.90625, + -0.70703125, + 2.859375, + 0.023681640625, + -1.671875, + -0.90234375, + 0.59375, + 3.671875, + -1.8984375, + -1.5078125, + -3.421875, + 3.5625, + -1.59375, + 1.4140625, + 0.1650390625, + 1.2109375, + 2.125, + -0.439453125, + -1.1015625, + -0.7109375, + 0.77734375, + 2.71875, + 2.3125, + 0.56640625, + -0.85546875, + -0.4296875, + -4.4375, + -0.59375, + 0.640625, + 1.7421875, + -2.484375, + -1.7890625, + -0.267578125, + 2.46875, + -0.3359375, + -2.875, + -0.92578125, + 1.0234375, + 0.06298828125, + 0.1416015625, + 2.46875, + 4.28125, + 4.0625, + 12.125, + -0.08154296875, + -0.271484375, + -0.0159912109375, + -0.66796875, + 0.1103515625, + -0.1337890625, + 1.3984375, + -0.66015625, + 0.84765625, + -0.87109375, + -1.5234375, + 0.71484375, + -2.453125, + -1.5078125, + -0.451171875, + 1.1875, + -0.5234375, + 2.125, + 0.1259765625, + -1.6875, + -0.279296875, + -0.0225830078125, + 1.6328125, + -4.1875, + 2.765625, + 1.390625, + 1.78125, + -1.1484375, + 2.015625, + -1.234375, + 0.26953125, + -1.5546875, + -0.30859375, + 1.3203125, + -0.953125, + -0.34375, + -0.1982421875, + 1.625, + 2.75, + -0.0164794921875, + 1.3515625, + -2.71875, + 1.3203125, + 0.796875, + 0.97265625, + 0.72265625, + 0.61328125, + 1.15625, + 4.375, + -3.484375, + 0.5546875, + 0.458984375, + 2.0625, + 1.0546875, + 0.7265625, + -3.40625, + -0.00176239013671875, + 0.380859375, + -0.1806640625, + 0.5234375, + -3.578125, + -2.40625, + -1.21875, + 0.890625, + -0.216796875, + 3.734375, + 1.25, + 0.73828125, + 1.3359375, + -0.2421875, + -2.265625, + 0.55078125, + 1.1953125, + -0.44921875, + 0.74609375, + 1.8359375, + 2.453125, + 1.0078125, + 2.609375, + 0.71875, + -0.306640625, + -1.796875, + -2.15625, + 0.392578125, + 1.2734375, + 0.123046875, + -1.203125, + -0.58203125, + -1.859375, + -0.478515625, + -0.6796875, + -0.039306640625, + -1.5703125, + 0.306640625, + 2.875, + 1.328125, + -0.2109375, + 0.0966796875, + 2.875, + -0.70703125, + -0.6015625, + 0.296875, + 1.8515625, + 
0.2216796875, + 2.53125, + -1.5078125, + 0.2158203125, + 1.671875, + 0.859375, + 2.046875, + 0.75390625, + -0.71484375, + 1.4765625, + 3.734375, + 0.58203125, + 8.3125, + -1.609375, + -1.0078125, + 0.490234375, + -1.4296875, + -1.5078125, + -1.6171875, + 0.38671875, + 0.349609375, + -2.75, + -0.251953125, + -0.142578125, + -1.6171875, + -6.03125, + -0.5078125, + -1.109375, + 2.609375, + -0.134765625, + -0.33203125, + -0.59765625, + 0.68359375, + -2.71875, + 4.3125, + -2.234375, + -1.8125, + -3.875, + -1.84375, + 1.7109375, + -3.46875, + 0.439453125, + 1.671875, + 3.828125, + 0.296875, + 0.95703125, + -0.96875, + 1.1875, + -1.8359375, + -2.265625, + 0.81640625, + 1.75, + 0.65234375, + -2.359375, + -2.28125, + -1.65625, + -8.625, + 1.8828125, + -1.0546875, + -1.4453125, + -1.6796875, + -1.234375, + 3.609375, + 0.99609375, + -1.5078125, + 2.171875, + 0.23828125, + -0.0869140625, + 0.4375, + 0.1865234375, + 0.0164794921875 + ], + "index": 0, + "object": "embedding", + "raw_output": null + } + ], + "model": "accounts/fireworks/models/qwen3-embedding-8b", + "object": "list", + "usage": { + "prompt_tokens": 9, + "total_tokens": 9, + "completion_tokens": 0 + }, + "perf_metrics": null + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/8e5912c90491.json b/tests/integration/recordings/responses/e4cee6b71b0e.json similarity index 94% rename from tests/integration/recordings/responses/8e5912c90491.json rename to tests/integration/recordings/responses/e4cee6b71b0e.json index f0e4ba93e..2fd58eb5f 100644 --- a/tests/integration/recordings/responses/8e5912c90491.json +++ b/tests/integration/recordings/responses/e4cee6b71b0e.json @@ -15,7 +15,7 @@ "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?" 
} ], - "max_tokens": 0, + "max_tokens": 512, "stream": true, "temperature": 0.0001, "tool_choice": "auto", @@ -55,7 +55,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-942", + "id": "chatcmpl-293", "choices": [ { "delta": { @@ -66,7 +66,7 @@ "tool_calls": [ { "index": 0, - "id": "call_rwvmhoza", + "id": "call_e17msgo0", "function": { "arguments": "{\"celcius\":false,\"liquid_name\":\"polyjuice\"}", "name": "get_boiling_point_with_metadata" @@ -80,7 +80,7 @@ "logprobs": null } ], - "created": 1759368464, + "created": 1759427030, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -91,7 +91,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-942", + "id": "chatcmpl-293", "choices": [ { "delta": { @@ -106,7 +106,7 @@ "logprobs": null } ], - "created": 1759368464, + "created": 1759427030, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/e871b8007b8c.json b/tests/integration/recordings/responses/e871b8007b8c.json new file mode 100644 index 000000000..71806138b --- /dev/null +++ b/tests/integration/recordings/responses/e871b8007b8c.json @@ -0,0 +1,389 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_8rf1aax7", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\": null, \"liquid_name\": \"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_8rf1aax7", + "content": "-212" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius", + "default": true + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": 
null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": "212", + "function_call": null, + "refusal": null, + "role": 
"assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-126", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759427029, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/f389f5cdf583.json b/tests/integration/recordings/responses/f389f5cdf583.json new file mode 100644 index 000000000..bdee5ab1f --- /dev/null +++ b/tests/integration/recordings/responses/f389f5cdf583.json @@ -0,0 +1,4137 @@ +{ + "request": { + "method": "POST", + "url": "https://api.fireworks.ai/inference/v1/v1/embeddings", + "headers": {}, + "body": { + "model": "accounts/fireworks/models/qwen3-embedding-8b", + "input": [ + "Python programming language" + ], + "encoding_format": "float" + }, + "endpoint": "/v1/embeddings", + "model": 
"accounts/fireworks/models/qwen3-embedding-8b" + }, + "response": { + "body": { + "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse", + "__data__": { + "data": [ + { + "embedding": [ + 1.4765625, + 3.96875, + -3.21875, + -4.21875, + 3.90625, + -4.78125, + -2.78125, + 3.4375, + 3.609375, + 1.15625, + -3.828125, + 3.46875, + 6.84375, + 0.83203125, + 8.875, + 3.453125, + 4.71875, + -1.6875, + 0.62890625, + -0.447265625, + -5.34375, + 4.5625, + 5.59375, + 3.796875, + -0.1953125, + 5.0625, + -5.0625, + 0.447265625, + 1.6328125, + -0.0133056640625, + -3.1875, + 2.8125, + 2.15625, + 6.15625, + -4.5, + 1.828125, + -1.0234375, + -1.921875, + 0.2578125, + 3.28125, + -2.234375, + -0.78125, + -1.7734375, + 1.2421875, + -0.75390625, + -2.28125, + -1.6875, + -1.3046875, + 0.248046875, + 0.1591796875, + -3.421875, + -0.84765625, + 0.162109375, + -1.0546875, + -2.078125, + 1.28125, + -0.404296875, + -1.015625, + -3.828125, + -2.234375, + -2.328125, + 0.314453125, + 3.578125, + -1.78125, + -1.2265625, + 0.9609375, + 0.470703125, + 1.6640625, + 1.953125, + 0.0301513671875, + -0.65234375, + 1.453125, + 1.859375, + -1.1328125, + -3.234375, + -0.29296875, + -2.875, + -3.890625, + 0.3671875, + 2.796875, + 0.373046875, + -1.5625, + -2.203125, + -1.4375, + -2.21875, + -0.98828125, + -1.421875, + -0.64453125, + -0.380859375, + -2.953125, + -1.5546875, + 2.015625, + -0.58203125, + 1.65625, + -1.2890625, + -4.5, + -0.68359375, + -0.58203125, + -0.64453125, + 2.4375, + 4.125, + 3.0, + -0.392578125, + 3.34375, + 1.28125, + 1.3984375, + 1.3359375, + -0.6171875, + -0.73828125, + -0.80078125, + 1.6328125, + 3.125, + 3.84375, + 0.271484375, + 0.578125, + -2.328125, + -1.0, + -1.265625, + 1.3671875, + -1.1640625, + 0.439453125, + -1.703125, + 0.875, + 0.07763671875, + 3.28125, + -3.859375, + -1.109375, + -0.39453125, + -5.0625, + -1.0078125, + -0.10595703125, + -0.2314453125, + -3.046875, + -3.34375, + -2.03125, + 0.1396484375, + 0.396484375, + -1.859375, + -3.890625, + 
-0.5625, + -1.1171875, + 0.5625, + 3.359375, + -0.4921875, + 1.4453125, + 0.73828125, + -2.015625, + 0.5703125, + 0.248046875, + 0.251953125, + 1.15625, + -1.515625, + -0.498046875, + -0.69921875, + -0.419921875, + -0.72265625, + -0.53515625, + 1.109375, + 2.1875, + 0.2353515625, + -1.4921875, + 0.1845703125, + 1.9765625, + 1.7265625, + 2.203125, + -1.328125, + 3.703125, + -2.21875, + 0.17578125, + -0.5703125, + 3.09375, + 0.0208740234375, + -2.671875, + 3.90625, + 0.91015625, + 1.21875, + 1.421875, + -0.625, + 0.79296875, + 2.15625, + 1.4140625, + 3.21875, + -1.8515625, + 2.5625, + 0.64453125, + -0.62109375, + -3.328125, + 1.9140625, + 1.34375, + 2.90625, + 0.0673828125, + 0.236328125, + 0.97265625, + 1.640625, + 1.1953125, + 0.57421875, + -0.267578125, + -2.296875, + 1.328125, + -0.0888671875, + 3.859375, + -2.46875, + 4.03125, + -0.1982421875, + 0.96484375, + -2.296875, + -1.3125, + 0.76953125, + -0.33203125, + -4.125, + 2.625, + -1.3203125, + 0.9765625, + 3.265625, + 0.65625, + 3.15625, + -0.341796875, + 0.671875, + -1.96875, + 0.8828125, + 1.234375, + 2.84375, + -1.703125, + 1.34375, + -1.96875, + -3.875, + -0.09912109375, + 1.171875, + 2.171875, + -4.5625, + 3.625, + -5.09375, + -0.59375, + -0.7890625, + 5.125, + -3.421875, + -2.265625, + -0.390625, + -0.412109375, + -0.248046875, + -2.5625, + -1.5234375, + 0.06103515625, + -1.359375, + -2.625, + 0.173828125, + 5.46875, + 1.296875, + 3.46875, + 0.89453125, + -1.765625, + -1.5078125, + -0.80078125, + 0.5234375, + 1.6171875, + -0.68359375, + -1.984375, + -1.1953125, + 0.87890625, + 0.421875, + -0.1953125, + -0.37890625, + -0.6015625, + -0.6640625, + -2.375, + -1.015625, + -1.4140625, + -2.734375, + -0.08740234375, + 0.6796875, + 0.384765625, + -0.84375, + -0.396484375, + 0.12158203125, + 0.28125, + 2.046875, + 2.3125, + -1.4765625, + -2.359375, + -0.5078125, + -0.310546875, + 1.90625, + 2.640625, + -1.03125, + -1.875, + -2.78125, + 0.92578125, + 3.515625, + 0.390625, + 0.57421875, + -1.453125, + -0.09912109375, 
+ 0.365234375, + 2.890625, + -1.703125, + -2.515625, + -4.5625, + -2.46875, + 1.8515625, + 2.40625, + 0.609375, + -0.359375, + 2.28125, + 1.34375, + -2.90625, + 1.84375, + -1.5390625, + 0.921875, + -1.5234375, + -2.09375, + -4.34375, + 2.09375, + -0.99609375, + -1.1875, + 2.421875, + -1.4609375, + 0.8125, + 1.9609375, + -2.84375, + 0.142578125, + -3.125, + 0.90234375, + -0.4609375, + 1.3984375, + -2.46875, + 1.6015625, + -0.7265625, + -0.376953125, + -1.890625, + 0.458984375, + -1.1015625, + -2.78125, + -1.7890625, + 3.53125, + -2.28125, + 0.96875, + 2.671875, + -4.1875, + 2.859375, + 0.08349609375, + -3.71875, + -1.953125, + 4.15625, + -0.349609375, + -0.8671875, + 0.875, + 0.65234375, + 0.89453125, + -3.453125, + 2.5, + -1.671875, + 3.0625, + 1.234375, + -1.5, + 0.67578125, + -1.0859375, + -0.443359375, + 1.640625, + 3.515625, + 1.140625, + -0.94921875, + 0.77734375, + -0.77734375, + -2.21875, + -1.3046875, + 2.875, + 2.40625, + -1.7890625, + -2.921875, + -1.125, + -1.265625, + 3.796875, + -0.0859375, + 0.12451171875, + 0.87109375, + 1.3828125, + 0.0830078125, + 1.3671875, + 2.359375, + 2.421875, + 1.6796875, + -6.09375, + 0.92578125, + -2.796875, + -0.466796875, + -0.283203125, + -1.0546875, + -0.8828125, + -1.1484375, + -3.0625, + -5.1875, + -1.46875, + 0.7734375, + -1.421875, + -2.65625, + 0.3984375, + -0.76171875, + 0.49609375, + -0.2734375, + 0.70703125, + 3.796875, + -0.1845703125, + -0.8359375, + 1.6875, + 2.6875, + 2.0625, + -0.5234375, + 0.1318359375, + -2.546875, + -0.88671875, + 1.4140625, + 0.60546875, + -0.44140625, + -1.4765625, + 1.71875, + 2.25, + 0.248046875, + 0.94921875, + -4.65625, + -3.09375, + -3.109375, + 1.4921875, + -1.8046875, + 1.2421875, + -1.1484375, + 0.228515625, + -2.828125, + 1.7734375, + 1.9296875, + -2.640625, + -0.90625, + -2.171875, + -6.875, + -1.1796875, + -1.828125, + -1.4375, + -0.4609375, + 0.86328125, + 2.328125, + -0.82421875, + -0.28515625, + 0.042724609375, + 4.5625, + 1.3984375, + -3.46875, + -0.294921875, + 
-1.2265625, + 3.28125, + 1.7421875, + -1.8046875, + -1.8203125, + 2.171875, + -0.94140625, + 0.375, + -0.1533203125, + -2.421875, + -2.890625, + -5.71875, + -1.140625, + 2.765625, + -0.5078125, + -0.248046875, + -0.7890625, + 3.640625, + -0.51171875, + -2.046875, + 1.640625, + 1.84375, + -3.703125, + -0.40234375, + -7.375, + -1.453125, + 0.8671875, + -4.09375, + 3.3125, + 3.0, + -1.078125, + -0.380859375, + 0.78515625, + -1.3046875, + 0.55859375, + -2.671875, + -2.203125, + -5.625, + 2.234375, + 8.125, + -0.6875, + -8.9375, + -1.015625, + 0.81640625, + 0.55078125, + 2.546875, + 2.796875, + 0.90234375, + 1.46875, + -0.8828125, + -0.8046875, + 0.1484375, + 0.90234375, + 0.8984375, + 0.6953125, + -0.65625, + -3.203125, + 1.6875, + -0.5703125, + -1.5, + 3.078125, + -3.140625, + -0.142578125, + 0.6484375, + 2.828125, + -0.51171875, + -2.484375, + 2.34375, + -1.859375, + 0.037353515625, + -0.4609375, + -1.0234375, + 0.51953125, + 1.6484375, + 0.3828125, + -3.375, + -0.18359375, + 1.609375, + 2.0, + -0.12158203125, + 1.4140625, + 0.046875, + 4.375, + 1.609375, + 0.75390625, + -0.2490234375, + -0.75390625, + -0.8828125, + -0.298828125, + -0.80859375, + -0.341796875, + -1.6484375, + 2.78125, + -4.59375, + 1.3359375, + 1.8046875, + -1.984375, + -0.39453125, + -0.287109375, + 0.26171875, + 0.15234375, + 0.61328125, + 0.8828125, + 1.25, + 0.482421875, + 2.765625, + 1.8515625, + 1.609375, + 2.265625, + 0.55078125, + -2.59375, + 1.671875, + 0.53515625, + 0.29296875, + -0.404296875, + 0.365234375, + -0.240234375, + 1.8125, + -2.34375, + -0.6171875, + -1.8828125, + 1.2109375, + 0.765625, + 0.96484375, + 1.25, + 1.1796875, + -1.28125, + -1.0078125, + -0.10302734375, + 0.43359375, + -1.4140625, + 0.310546875, + 0.166015625, + -6.0, + -0.150390625, + -2.34375, + 0.6171875, + -1.96875, + 1.3984375, + -2.796875, + -2.03125, + -1.40625, + -1.4765625, + 1.6015625, + 0.6328125, + 0.84375, + 0.953125, + -0.3046875, + -4.53125, + 1.2265625, + -0.203125, + -0.94140625, + 0.2294921875, + 
0.515625, + -1.625, + -2.828125, + 2.34375, + 1.3671875, + -1.53125, + 3.609375, + -2.390625, + 0.0157470703125, + 1.7109375, + -8.4375, + 4.1875, + -0.29296875, + -3.34375, + 2.703125, + -2.734375, + -5.78125, + 2.515625, + 0.267578125, + 2.0, + -0.353515625, + -3.421875, + -1.796875, + -0.26953125, + 0.2470703125, + 3.953125, + 1.0, + -0.279296875, + 1.53125, + -1.703125, + -1.8125, + -3.0, + 1.8671875, + 1.46875, + -0.353515625, + -2.109375, + -0.1533203125, + 0.2294921875, + 1.5078125, + -2.484375, + 4.9375, + 3.703125, + -1.7890625, + 1.09375, + 0.322265625, + 2.875, + 3.0625, + 3.859375, + 1.2265625, + -1.9609375, + -0.75390625, + -0.32421875, + -0.125, + 1.265625, + -2.0625, + -1.2578125, + -1.28125, + 1.90625, + -0.47265625, + -3.8125, + -1.5, + 0.07666015625, + -0.90234375, + 2.25, + -0.8984375, + 1.671875, + 0.07080078125, + 0.70703125, + 0.408203125, + 2.0, + -1.1328125, + -0.0390625, + 2.984375, + -1.84375, + 0.78515625, + 1.375, + 3.5625, + 3.453125, + 0.298828125, + 0.20703125, + -3.9375, + 0.44921875, + 0.6875, + 0.10400390625, + 0.8984375, + 2.765625, + -5.3125, + 0.037353515625, + 0.42578125, + 1.546875, + 0.0218505859375, + -1.6171875, + -2.671875, + -1.046875, + 0.69921875, + 0.87890625, + 1.90625, + -1.5, + -0.0107421875, + -0.2890625, + 3.15625, + -0.71875, + 1.875, + -2.546875, + -2.515625, + -0.39453125, + 0.022216796875, + -0.76171875, + 0.80859375, + -1.5234375, + -0.84765625, + -3.90625, + -0.30078125, + 0.1357421875, + 0.47265625, + -0.60546875, + -1.2890625, + 7.5, + 1.8828125, + -2.9375, + -1.03125, + 3.125, + 1.4921875, + 0.2431640625, + 1.9609375, + 1.15625, + -3.578125, + 3.609375, + 0.0, + 0.65234375, + -0.36328125, + -3.484375, + 0.546875, + 0.89453125, + 0.90234375, + -2.46875, + 2.671875, + -1.1953125, + -0.6484375, + -0.60546875, + -0.051513671875, + 0.107421875, + -0.50390625, + -2.890625, + 2.25, + -3.15625, + -0.80859375, + -1.1015625, + -2.65625, + -1.0, + -1.375, + 2.140625, + 1.3125, + -2.46875, + 1.5234375, + -0.40234375, 
+ 0.640625, + 0.3671875, + -0.8046875, + 2.625, + -2.046875, + -0.65625, + 0.10546875, + -0.060302734375, + 3.453125, + 3.09375, + 3.828125, + -1.671875, + -0.74609375, + 0.458984375, + -1.9765625, + -0.0279541015625, + -3.328125, + -0.50390625, + 0.169921875, + 2.015625, + -2.171875, + -2.328125, + 1.6171875, + 0.02490234375, + 1.1875, + -0.3828125, + 1.0859375, + 1.28125, + 2.375, + -1.3125, + 1.703125, + -4.5625, + -1.375, + 0.7109375, + 1.859375, + -2.5, + -0.06884765625, + 3.40625, + 2.609375, + 3.3125, + 2.34375, + 1.609375, + 0.22265625, + -3.375, + 6.125, + -2.21875, + 0.8515625, + -0.435546875, + 0.97265625, + 0.67578125, + -0.197265625, + 3.40625, + -0.90234375, + 3.796875, + 6.59375, + -2.0625, + 6.03125, + -0.67578125, + 1.421875, + -0.1630859375, + 0.349609375, + -1.734375, + -0.0361328125, + -2.21875, + 1.1875, + -1.3515625, + -1.625, + 3.625, + 3.328125, + -0.37109375, + -2.015625, + -0.146484375, + 2.015625, + 3.359375, + -4.625, + -1.953125, + -1.7890625, + -2.421875, + -0.6953125, + -0.7421875, + -2.484375, + 3.15625, + -0.462890625, + 1.8125, + 2.15625, + 1.421875, + 2.375, + -1.890625, + -4.53125, + -3.296875, + 2.734375, + -2.0625, + -2.671875, + 0.7578125, + -1.3828125, + -0.6796875, + -2.546875, + 1.2734375, + 0.8515625, + -1.109375, + 0.057373046875, + 1.890625, + -1.4140625, + -5.03125, + -3.0625, + 2.265625, + 1.640625, + -0.494140625, + -3.328125, + 3.203125, + -1.7421875, + -0.1357421875, + 3.515625, + -0.890625, + -1.5234375, + -0.52734375, + 0.447265625, + 3.28125, + 0.7109375, + 0.486328125, + 1.8203125, + 1.515625, + 1.0, + 0.61328125, + -0.48046875, + 0.05517578125, + 1.375, + -1.03125, + 2.0, + 1.1171875, + 3.453125, + 0.07763671875, + 1.8828125, + -0.404296875, + 3.5625, + -0.11962890625, + -2.9375, + 0.259765625, + 3.015625, + -2.390625, + 1.8046875, + 2.9375, + 1.1640625, + -0.06591796875, + -3.765625, + -1.3828125, + 2.953125, + 2.015625, + 1.1640625, + 0.2109375, + 1.9375, + -1.984375, + -2.109375, + -2.578125, + -1.9296875, + 
-0.1318359375, + -2.0, + 0.53125, + 2.0625, + 0.08154296875, + 1.5546875, + -1.6171875, + 0.0, + -0.90625, + 0.625, + 0.82421875, + 0.55859375, + -0.25, + 0.85546875, + -0.76953125, + 0.490234375, + 1.234375, + -1.1171875, + -3.15625, + -1.125, + -0.55078125, + 0.6484375, + 0.55859375, + 1.46875, + -0.43359375, + 1.3828125, + -1.546875, + 1.1015625, + 3.03125, + -0.05908203125, + -0.107421875, + -4.03125, + 2.34375, + -2.21875, + -4.65625, + -1.21875, + 0.2021484375, + -2.59375, + 1.7421875, + -2.46875, + 0.7109375, + -2.875, + 0.953125, + 2.046875, + 1.015625, + 2.71875, + -1.9765625, + -3.84375, + -1.5859375, + 0.2412109375, + 5.53125, + -0.98828125, + 2.234375, + 0.87109375, + 0.33203125, + 0.08740234375, + 1.203125, + -0.279296875, + -5.96875, + 0.640625, + 1.65625, + -0.69140625, + -0.62109375, + 1.1953125, + -0.1181640625, + 1.96875, + 1.84375, + -1.234375, + 1.3828125, + 0.52734375, + 2.453125, + 0.12255859375, + 0.037353515625, + 1.28125, + 1.3828125, + 1.7109375, + 0.375, + -1.0859375, + -6.09375, + 12.625, + 0.65625, + 3.09375, + 1.4375, + 1.4765625, + 5.53125, + -1.0859375, + 0.11572265625, + -1.7109375, + 4.28125, + 1.53125, + 1.640625, + 0.2470703125, + -1.765625, + 0.2197265625, + 1.28125, + 0.53515625, + -1.140625, + -0.022216796875, + 1.1328125, + -4.09375, + -1.5, + -1.1953125, + 0.78515625, + 3.703125, + -1.78125, + -2.609375, + -1.03125, + 1.7109375, + -1.3515625, + -0.166015625, + -1.796875, + -2.46875, + 1.140625, + -3.328125, + 1.5390625, + -0.435546875, + 1.078125, + 0.44140625, + -1.203125, + -1.75, + 1.140625, + 1.53125, + 0.84375, + -1.28125, + 3.296875, + 0.984375, + 2.734375, + 1.40625, + -2.078125, + -1.9921875, + 2.765625, + -0.419921875, + -0.6328125, + 0.9609375, + -1.046875, + -1.6328125, + -0.318359375, + 1.4375, + -1.8125, + -3.296875, + -0.5234375, + -2.90625, + 2.015625, + 0.6015625, + 0.2275390625, + 0.1953125, + 3.5, + 0.8828125, + -0.69140625, + -1.0703125, + 1.6171875, + -1.2578125, + 0.69921875, + 1.453125, + -2.78125, + 
0.267578125, + -0.259765625, + -4.15625, + -3.046875, + 1.453125, + -0.5234375, + -1.609375, + -2.0625, + -2.1875, + -0.58984375, + 0.64453125, + 0.287109375, + -0.5859375, + -3.984375, + -0.796875, + 1.953125, + -1.1640625, + 1.125, + 0.5, + 3.796875, + 3.296875, + 0.036376953125, + 1.34375, + -1.09375, + -1.609375, + 1.453125, + -2.34375, + 0.353515625, + 1.1171875, + -0.3046875, + 1.0546875, + 5.40625, + -1.7265625, + -0.25390625, + 0.98046875, + -0.81640625, + -1.1640625, + 1.671875, + 0.234375, + -0.478515625, + 0.765625, + -1.5, + -2.828125, + 0.953125, + -1.421875, + -0.400390625, + 1.890625, + -0.1513671875, + -1.0390625, + 6.5625, + 1.765625, + 2.96875, + -0.447265625, + -0.609375, + 1.6484375, + -0.380859375, + -4.3125, + 0.55859375, + -0.9453125, + -0.474609375, + -3.53125, + -0.8203125, + -2.953125, + -0.61328125, + -5.875, + -0.439453125, + 0.08642578125, + 0.1328125, + 1.84375, + 3.203125, + -1.5859375, + 1.890625, + 2.421875, + 1.859375, + 0.625, + -2.875, + 0.59765625, + -0.1171875, + -0.7890625, + -5.78125, + -0.333984375, + 1.5625, + -0.734375, + -2.90625, + 1.65625, + 1.5, + -1.1171875, + 1.6484375, + -0.53515625, + 1.1015625, + -0.5625, + 3.828125, + -2.765625, + -0.6640625, + -1.328125, + -0.23046875, + 0.28515625, + 2.609375, + -2.6875, + 0.2158203125, + 1.7265625, + -1.9296875, + 0.380859375, + 0.1728515625, + 0.99609375, + 1.21875, + 0.314453125, + 1.1640625, + -0.3125, + -1.7734375, + -0.146484375, + -0.08154296875, + 0.9140625, + -0.48046875, + 3.203125, + 1.25, + -4.21875, + -1.234375, + 1.140625, + -0.56640625, + -3.390625, + -0.91015625, + 0.12353515625, + 3.5625, + -2.1875, + -0.447265625, + -2.03125, + 1.015625, + 2.078125, + -0.90234375, + -1.5703125, + 1.4453125, + -3.328125, + 1.78125, + 1.0078125, + 0.7109375, + 2.359375, + -0.287109375, + -1.8359375, + 0.022216796875, + -1.4765625, + 2.421875, + -0.765625, + 1.4375, + 0.41796875, + -3.015625, + 0.4296875, + -3.234375, + -1.9765625, + -2.546875, + 2.890625, + -1.34375, + 2.171875, 
+ -1.2734375, + -0.75, + -1.515625, + -1.71875, + 2.484375, + 0.373046875, + 2.5, + -1.015625, + 2.84375, + 2.21875, + -1.1796875, + 1.28125, + 0.2275390625, + -3.8125, + -0.91015625, + 2.625, + 1.5390625, + -0.10498046875, + -1.109375, + -0.2890625, + 1.109375, + -0.87109375, + -1.3125, + 0.01611328125, + 0.482421875, + -0.6640625, + 0.41015625, + 1.0625, + -1.0625, + -1.6015625, + 1.9921875, + -2.921875, + -1.5234375, + 0.376953125, + -0.96875, + 1.140625, + -2.34375, + 0.228515625, + -0.828125, + 0.8984375, + 1.09375, + 1.0546875, + -0.6640625, + -2.40625, + -0.275390625, + 1.0078125, + 0.28125, + 4.8125, + -2.734375, + 2.0625, + -0.2080078125, + -1.671875, + -0.059814453125, + -1.7109375, + 0.29296875, + 3.515625, + -0.10107421875, + 2.375, + 0.69140625, + -2.375, + -0.03271484375, + -0.40234375, + 3.609375, + -0.7734375, + -0.279296875, + -0.1806640625, + -4.34375, + -0.484375, + 3.296875, + 0.091796875, + -0.1455078125, + 0.333984375, + -0.36328125, + -0.259765625, + -0.58203125, + -1.65625, + 2.5625, + 1.015625, + -1.90625, + 3.703125, + 1.984375, + 0.4375, + 1.515625, + 1.8984375, + -2.25, + 3.3125, + -0.05322265625, + 2.140625, + -0.58203125, + 2.078125, + -2.125, + -0.671875, + 1.6328125, + 2.53125, + 0.61328125, + -0.5703125, + -2.125, + -1.84375, + 2.34375, + 2.984375, + 0.427734375, + -1.203125, + 2.3125, + 1.0390625, + -0.345703125, + 0.1533203125, + 0.21875, + 3.40625, + 1.296875, + 0.5234375, + 0.75390625, + 0.421875, + 0.388671875, + 3.53125, + -1.953125, + 3.203125, + -1.4140625, + -1.0703125, + -2.234375, + -1.5, + -1.2109375, + 1.2421875, + -0.291015625, + 2.25, + 0.0908203125, + -2.09375, + 0.0191650390625, + 1.75, + 1.328125, + -1.9140625, + -0.5, + -6.59375, + -7.28125, + 1.078125, + -0.6484375, + 0.9765625, + 2.296875, + 0.87890625, + 0.038818359375, + -0.4375, + -2.5, + 3.078125, + -1.8203125, + -0.10302734375, + -0.93359375, + 0.8125, + 1.0859375, + 2.875, + 0.7734375, + 4.1875, + -1.3359375, + 1.7421875, + 1.015625, + -0.6796875, + 
-2.953125, + -0.73828125, + -0.2373046875, + -0.80859375, + -2.21875, + 2.078125, + -1.4375, + 1.0703125, + 0.05615234375, + -0.478515625, + 2.03125, + -1.8984375, + 0.115234375, + -0.671875, + 4.3125, + 1.640625, + -1.3046875, + 0.435546875, + 3.03125, + -1.8828125, + -1.4921875, + 2.515625, + -1.34375, + -0.90234375, + 1.2265625, + -1.0546875, + -3.171875, + -0.07861328125, + 0.515625, + -2.3125, + 0.80859375, + 2.09375, + -2.234375, + -1.296875, + 1.6171875, + -1.171875, + -0.138671875, + 1.9296875, + -0.1416015625, + 0.310546875, + -0.6015625, + -1.4140625, + 2.609375, + -0.10888671875, + 1.609375, + 0.1533203125, + -0.88671875, + -2.546875, + -1.6171875, + -2.71875, + -0.1650390625, + 1.96875, + 2.484375, + -2.578125, + 0.388671875, + -3.1875, + -2.21875, + -3.0625, + 0.59765625, + -1.390625, + 2.78125, + -1.03125, + 0.1904296875, + 0.2578125, + -2.5, + 2.609375, + 2.34375, + 2.5625, + -0.46484375, + -2.71875, + 2.859375, + 18.375, + 0.9453125, + -4.15625, + 1.3671875, + 1.5, + 0.96875, + -2.328125, + -5.40625, + 0.10986328125, + -0.81640625, + -1.4375, + -0.38671875, + -1.9921875, + 0.150390625, + -0.1396484375, + 3.078125, + -0.08837890625, + -1.46875, + 1.7421875, + -1.9296875, + -1.9140625, + 4.59375, + -1.96875, + -2.671875, + -0.515625, + -0.6640625, + 0.61328125, + 3.875, + 0.890625, + -0.0810546875, + -4.25, + 2.5625, + -0.1318359375, + -0.8828125, + 0.169921875, + -1.171875, + 1.828125, + 0.98046875, + 2.671875, + 2.9375, + 1.25, + 2.140625, + 1.65625, + 0.83203125, + 0.44921875, + 0.016357421875, + -3.078125, + 0.00909423828125, + 1.7578125, + -1.1640625, + 1.1328125, + 0.84765625, + 1.125, + -1.546875, + 1.578125, + -1.515625, + 0.1728515625, + -1.109375, + 1.796875, + 3.21875, + 1.3125, + -1.796875, + -0.451171875, + -2.1875, + -1.296875, + 1.7265625, + 0.5703125, + 0.88671875, + -1.28125, + -0.431640625, + -1.8125, + 0.9375, + 2.921875, + 5.75, + 0.50390625, + 0.2890625, + -3.28125, + 0.423828125, + -2.25, + 1.1015625, + -0.66796875, + 5.34375, + 
0.4453125, + 0.466796875, + 2.625, + 0.76171875, + 2.34375, + -0.494140625, + -1.671875, + -0.53125, + -0.54296875, + -0.341796875, + 1.359375, + -0.3359375, + -2.390625, + 0.052490234375, + 2.328125, + 3.265625, + 1.2109375, + -0.287109375, + -0.91796875, + 3.828125, + 1.875, + 3.015625, + -1.171875, + 0.03759765625, + -0.435546875, + -0.10888671875, + -1.7578125, + 1.7734375, + -0.474609375, + -1.3828125, + 0.875, + -2.421875, + -1.1484375, + 1.546875, + -0.90234375, + -0.30859375, + -0.94921875, + -1.2734375, + -1.453125, + -0.412109375, + 0.58984375, + 1.0703125, + 0.98828125, + 0.9765625, + 0.26171875, + -3.0625, + -3.5, + -0.859375, + 3.046875, + 2.03125, + 2.40625, + -1.2578125, + -1.765625, + 1.453125, + 2.546875, + -0.2099609375, + -0.9296875, + -1.03125, + 2.40625, + 1.0625, + 1.125, + -0.56640625, + 0.365234375, + 0.25390625, + 2.578125, + -0.1279296875, + 0.18359375, + -0.7265625, + 3.296875, + 3.015625, + 0.1796875, + -3.625, + 0.3046875, + 3.3125, + 0.2021484375, + 1.2421875, + -0.76953125, + -1.1796875, + -0.44140625, + 2.5625, + -1.515625, + 2.21875, + 0.271484375, + -0.04345703125, + -1.078125, + 2.28125, + -1.109375, + 1.828125, + 0.0, + 1.8515625, + -0.154296875, + 0.98828125, + -4.03125, + 2.59375, + 0.021240234375, + -0.5078125, + 0.72265625, + 0.0301513671875, + -1.53125, + 3.109375, + 1.1328125, + -0.75, + 0.7265625, + -0.83984375, + -0.1259765625, + 0.26953125, + -0.70703125, + -2.203125, + -0.142578125, + 2.625, + -1.046875, + -1.953125, + -1.4453125, + 0.87109375, + 4.625, + -1.7734375, + -0.455078125, + 0.703125, + 0.48046875, + -1.015625, + -1.1328125, + -0.66796875, + -0.26953125, + 1.1953125, + 3.984375, + -1.359375, + -1.8984375, + 2.78125, + -0.318359375, + -1.109375, + 1.4140625, + -1.3828125, + -2.53125, + -1.1328125, + -2.515625, + 0.0299072265625, + -2.515625, + 0.306640625, + -2.09375, + -0.90625, + 0.498046875, + 0.2236328125, + -0.1572265625, + -0.6171875, + -1.0390625, + 1.328125, + -0.5078125, + -4.59375, + -0.466796875, + 
2.015625, + -3.90625, + 2.40625, + -0.9765625, + -0.5546875, + 0.3046875, + -1.4921875, + 2.15625, + -0.56640625, + -0.34765625, + 0.14453125, + 1.0546875, + 1.5859375, + 0.56640625, + -1.078125, + 1.3046875, + -0.65234375, + 0.6953125, + -2.4375, + 2.15625, + -0.31640625, + 4.09375, + -0.498046875, + 1.2890625, + 3.09375, + -0.060546875, + -3.0625, + 4.125, + -3.890625, + 2.46875, + 1.4453125, + -1.109375, + 0.88671875, + 0.3515625, + -2.734375, + 0.0, + 2.0625, + 3.0625, + -0.318359375, + -0.9140625, + 0.134765625, + 0.6953125, + -0.5078125, + 6.09375, + -1.234375, + -4.25, + -1.7734375, + 1.25, + -0.515625, + -2.734375, + -2.875, + 3.1875, + -3.296875, + 0.369140625, + -1.2421875, + -0.2177734375, + 0.69921875, + -2.90625, + 0.95703125, + -2.421875, + -2.28125, + 1.265625, + 0.2421875, + -2.953125, + -1.6640625, + 2.359375, + -2.203125, + -0.9296875, + 0.2021484375, + 0.478515625, + -0.65234375, + 2.109375, + 2.078125, + 3.390625, + -1.21875, + 1.1015625, + 2.96875, + -1.421875, + -0.921875, + -0.57421875, + 2.390625, + -0.79296875, + 1.015625, + -1.4375, + 2.359375, + -1.296875, + -1.140625, + 3.796875, + -2.390625, + -0.3203125, + 0.016357421875, + 3.578125, + 2.671875, + 4.84375, + 1.734375, + 2.453125, + 1.03125, + -0.353515625, + 1.5234375, + -0.66796875, + -1.2734375, + -3.703125, + -0.6875, + 0.70703125, + -0.91796875, + -1.6171875, + -3.515625, + -2.5625, + 2.46875, + -9.4375, + -2.84375, + -0.76171875, + 0.85546875, + 0.38671875, + -2.46875, + 1.640625, + 1.6484375, + -2.171875, + -4.40625, + 1.1640625, + 0.341796875, + -2.828125, + -0.1767578125, + 3.09375, + -1.2578125, + 0.515625, + -0.625, + 3.234375, + -0.99609375, + 1.1484375, + 2.078125, + 1.4609375, + 0.9609375, + -0.451171875, + 1.4921875, + 1.28125, + -0.369140625, + 1.7734375, + -3.3125, + 0.54296875, + 0.053955078125, + 1.4609375, + 0.3984375, + 2.203125, + -2.703125, + -3.375, + -3.0625, + -1.046875, + 0.625, + -0.416015625, + 1.578125, + -3.859375, + -1.046875, + -1.34375, + -2.03125, + 
-1.1796875, + -0.59765625, + -5.15625, + 0.8984375, + 3.25, + 1.046875, + 1.71875, + 0.318359375, + -6.59375, + -0.0002574920654296875, + -1.4921875, + 0.384765625, + -0.90625, + 0.1357421875, + 1.3125, + -1.8359375, + -1.96875, + -0.427734375, + -1.21875, + 1.2890625, + -0.87890625, + 1.453125, + -0.7578125, + 1.453125, + 1.3203125, + 0.1767578125, + -0.296875, + 2.03125, + -0.765625, + -2.703125, + -2.234375, + 0.435546875, + 1.3046875, + 0.146484375, + -0.236328125, + 1.0546875, + -0.796875, + 0.58984375, + -0.2333984375, + -0.1533203125, + -2.03125, + -1.6015625, + 3.46875, + 1.25, + 3.046875, + 0.89453125, + -1.6171875, + 1.1015625, + -6.1875, + 0.134765625, + -1.4296875, + -0.81640625, + 3.125, + -3.265625, + 0.478515625, + -1.6015625, + -0.30078125, + 0.1484375, + -2.3125, + -0.6484375, + 15.0, + -1.65625, + -1.6484375, + 0.416015625, + -0.07666015625, + -0.48828125, + -0.5703125, + -0.031982421875, + -3.6875, + 1.953125, + 0.640625, + 2.828125, + -3.734375, + 1.296875, + 1.4375, + -4.375, + 3.84375, + -2.390625, + 0.80078125, + 3.65625, + -0.021484375, + 0.44140625, + -1.140625, + -0.007781982421875, + 1.1171875, + 1.84375, + 0.83984375, + -1.6328125, + -1.0625, + 1.1015625, + 2.46875, + 0.009765625, + -1.203125, + 0.10595703125, + 0.203125, + 0.57421875, + 1.34375, + 3.0, + -2.625, + 2.34375, + -2.703125, + 0.203125, + 0.5703125, + -0.71875, + -1.25, + 0.177734375, + -2.296875, + 1.5234375, + 0.26953125, + 1.5703125, + 0.37109375, + -0.80078125, + 1.328125, + -1.578125, + -3.796875, + -1.078125, + 0.73828125, + 5.6875, + -3.40625, + -1.8984375, + 3.09375, + -0.1845703125, + -0.0177001953125, + 0.025634765625, + -2.375, + 2.53125, + -0.515625, + -2.3125, + 1.84375, + 0.765625, + -1.9921875, + 1.3828125, + 3.6875, + 1.2109375, + 1.7421875, + -1.84375, + -2.421875, + 0.5234375, + 0.453125, + -0.07177734375, + -0.07470703125, + 1.203125, + -1.453125, + -1.3359375, + 0.5703125, + -0.2021484375, + 1.3125, + -0.06494140625, + 2.40625, + -1.1015625, + 
0.09326171875, + 1.390625, + 1.8828125, + -1.015625, + -0.59375, + -0.546875, + 2.4375, + -1.34375, + 1.1328125, + 1.296875, + 1.984375, + 3.375, + -0.267578125, + 0.69140625, + 0.01068115234375, + 0.73828125, + -2.578125, + -2.984375, + 1.09375, + 2.40625, + 0.82421875, + 0.609375, + 0.41015625, + -1.4609375, + 1.625, + 0.44140625, + 3.21875, + 1.6796875, + -1.59375, + 0.10595703125, + -1.546875, + 1.3671875, + -1.3359375, + -0.75, + -0.2080078125, + -3.0, + 1.5234375, + -2.234375, + 2.25, + 1.09375, + -2.203125, + 0.08154296875, + 2.0, + -1.3671875, + 0.83203125, + -0.90234375, + 1.7734375, + 1.546875, + 0.671875, + 1.0234375, + 1.2109375, + 1.1640625, + -3.015625, + 0.1533203125, + 1.765625, + 0.6484375, + -0.4375, + 0.98828125, + -1.625, + -0.91015625, + -0.494140625, + 1.6640625, + -0.80859375, + 2.90625, + -1.3671875, + 3.0, + 0.83984375, + -0.53125, + 0.3359375, + -3.203125, + -0.251953125, + -0.3203125, + 0.318359375, + -0.55078125, + 0.89453125, + 3.53125, + -1.359375, + 0.51171875, + 0.72265625, + 0.89453125, + -2.46875, + 1.265625, + -2.328125, + -0.31640625, + 1.09375, + -0.4921875, + 2.4375, + 0.7734375, + 0.86328125, + 0.43359375, + -1.078125, + 0.796875, + -1.34375, + -0.9140625, + 0.94921875, + -0.376953125, + -0.16796875, + 2.671875, + 1.765625, + 3.71875, + 1.171875, + -4.28125, + -1.5390625, + 0.78515625, + -0.16796875, + -2.296875, + 1.609375, + -2.515625, + -1.0078125, + -0.328125, + 0.439453125, + -0.2119140625, + 0.66796875, + 0.79296875, + 0.71875, + 2.0, + 3.1875, + 5.6875, + -2.09375, + 0.50390625, + -1.5703125, + -0.859375, + 1.609375, + 2.59375, + 0.5546875, + 0.72265625, + -2.0, + 0.921875, + 1.015625, + -2.65625, + 1.2734375, + 2.234375, + 1.546875, + 0.64453125, + -0.61328125, + 0.71484375, + -0.0830078125, + 5.375, + -1.703125, + -1.96875, + 3.0, + -0.2392578125, + 2.03125, + 3.4375, + -0.921875, + -3.734375, + 0.6953125, + 2.5625, + -2.296875, + 0.7890625, + -0.84375, + -1.8046875, + 0.984375, + -0.3671875, + -0.875, + 3.859375, + 
-1.4921875, + 0.15234375, + -0.6171875, + -1.4375, + 0.1435546875, + -1.2578125, + -1.3984375, + 0.96484375, + -0.77734375, + 0.63671875, + -2.109375, + -0.84765625, + 2.578125, + -0.482421875, + -2.078125, + -0.3203125, + 1.1875, + 1.1953125, + -0.80859375, + 1.7734375, + 3.640625, + -2.578125, + 0.94921875, + 7.46875, + -1.0703125, + -1.015625, + -2.625, + 4.625, + 1.390625, + 1.5390625, + -0.0419921875, + -0.31640625, + 0.26953125, + 0.41796875, + 1.046875, + -4.09375, + -2.0, + -1.3984375, + 1.953125, + 0.8203125, + 3.921875, + -2.734375, + 1.6328125, + -2.25, + -2.578125, + 1.3359375, + -0.9609375, + 0.41015625, + -0.70703125, + 1.5234375, + 0.7890625, + -3.71875, + -0.462890625, + -0.1025390625, + 2.09375, + -1.7421875, + 2.21875, + -0.435546875, + -3.53125, + -0.33984375, + 0.58984375, + 1.875, + -0.462890625, + -1.875, + -1.1171875, + -0.03515625, + 1.8359375, + -8.0625, + -1.4453125, + 2.265625, + 0.031494140625, + -1.2890625, + -0.5703125, + -2.15625, + 1.28125, + 0.36328125, + 1.28125, + 1.4921875, + -3.171875, + -0.267578125, + 0.7265625, + 3.578125, + -2.25, + 3.765625, + -1.1015625, + -0.310546875, + 5.75, + -2.125, + 2.59375, + 1.4609375, + -1.015625, + 0.9609375, + -1.421875, + 0.984375, + 0.0294189453125, + -2.859375, + -1.8125, + 0.734375, + -0.271484375, + 1.7890625, + 0.8125, + 0.74609375, + -0.0859375, + -0.027587890625, + -1.3125, + -2.703125, + 0.333984375, + -2.671875, + -0.7109375, + -0.0888671875, + 0.453125, + -0.197265625, + -0.65234375, + -3.859375, + -0.259765625, + -0.330078125, + 1.9140625, + -0.059326171875, + 1.4453125, + -0.1767578125, + -5.46875, + -1.515625, + -2.109375, + 0.96484375, + 1.828125, + -1.515625, + -1.2265625, + -0.462890625, + -1.046875, + -1.6640625, + 1.3125, + -0.96875, + -2.890625, + 1.59375, + 1.1484375, + 2.640625, + 2.40625, + -2.15625, + 3.203125, + -0.41015625, + -5.875, + 0.0004024505615234375, + 0.609375, + -0.8828125, + -1.6953125, + 1.8359375, + 1.765625, + 2.1875, + 1.5703125, + -5.375, + 1.34375, + 
0.890625, + -2.265625, + 1.5625, + 3.234375, + -0.4765625, + 1.9296875, + 1.515625, + 1.359375, + 0.1484375, + 1.046875, + 1.171875, + 1.3359375, + 2.21875, + -1.2890625, + -2.828125, + -3.1875, + -1.0625, + -0.494140625, + -2.453125, + 0.53515625, + 2.859375, + -0.54296875, + -4.0625, + -1.7890625, + -4.3125, + 1.0859375, + -1.34375, + -1.96875, + 0.5390625, + -0.46875, + 1.90625, + 2.515625, + -0.83984375, + -0.353515625, + -1.9296875, + 1.046875, + 1.78125, + 4.25, + -1.6953125, + 1.046875, + 0.322265625, + 3.859375, + -1.296875, + -2.703125, + -2.046875, + 1.3515625, + -0.72265625, + 0.435546875, + -0.83203125, + 0.89453125, + 2.09375, + 0.1376953125, + 2.1875, + -0.388671875, + 0.6484375, + 0.796875, + -0.796875, + -1.0859375, + 1.21875, + -1.6171875, + -0.6875, + 0.484375, + -0.41015625, + 2.453125, + -1.078125, + -0.474609375, + 0.2734375, + 1.875, + 0.73828125, + -3.140625, + -2.125, + 2.421875, + -2.484375, + 2.078125, + 0.11572265625, + -2.640625, + -0.458984375, + -3.53125, + -0.83203125, + -0.87890625, + -1.0703125, + 3.734375, + 0.4609375, + -0.29296875, + 1.9296875, + -1.4375, + 0.734375, + 1.0078125, + -1.578125, + 0.416015625, + -1.875, + -2.484375, + 0.14453125, + 0.18359375, + 0.380859375, + -1.5546875, + -0.65625, + -3.734375, + -0.39453125, + 1.8359375, + 0.734375, + 6.34375, + 2.65625, + -1.0625, + 2.75, + 0.9609375, + 0.035888671875, + -2.96875, + 1.03125, + -0.1298828125, + 0.98828125, + -0.439453125, + -0.26171875, + 0.486328125, + 3.359375, + 0.515625, + -0.25, + -0.470703125, + 2.3125, + -1.765625, + -1.734375, + 0.435546875, + 1.6875, + 4.34375, + -1.28125, + 2.890625, + 1.5234375, + 1.203125, + 3.796875, + 3.03125, + 0.70703125, + -0.78125, + 0.498046875, + -3.5625, + -1.46875, + 2.375, + -3.84375, + 0.83203125, + -1.1484375, + 1.7734375, + -1.0703125, + 3.875, + 1.109375, + -5.53125, + -0.10986328125, + 3.671875, + 0.11669921875, + 1.8203125, + 2.625, + -1.0625, + 1.9375, + -2.296875, + 1.859375, + 3.59375, + 1.2109375, + -0.1611328125, 
+ 1.546875, + 2.84375, + 0.5234375, + 1.515625, + 0.0, + -5.15625, + -0.29296875, + -0.470703125, + -0.03271484375, + -0.2578125, + 2.03125, + 1.2109375, + -2.546875, + 1.8984375, + 2.1875, + -1.7578125, + -1.859375, + -0.059814453125, + -2.546875, + -0.50390625, + 3.390625, + -2.84375, + 2.328125, + -2.5, + 4.0625, + -0.482421875, + 2.8125, + -2.75, + -2.296875, + 0.796875, + 3.28125, + -1.8046875, + 0.578125, + -1.53125, + -2.734375, + 0.45703125, + -1.5625, + -1.671875, + 0.859375, + 0.6953125, + -1.2109375, + -0.99609375, + -0.33984375, + -1.421875, + 3.6875, + 0.5625, + -2.046875, + -1.96875, + -6.59375, + 0.09375, + 3.171875, + -0.59375, + -2.359375, + 1.0859375, + -0.86328125, + 2.015625, + -1.4453125, + 2.640625, + 0.5234375, + -3.609375, + 2.171875, + 3.625, + -2.953125, + -0.9921875, + -0.283203125, + -1.609375, + 1.4765625, + -2.078125, + 3.4375, + -1.8125, + 1.6953125, + -2.40625, + -4.0625, + 1.65625, + 1.03125, + -2.609375, + 0.408203125, + -0.35546875, + -0.72265625, + 0.1552734375, + 2.484375, + -0.412109375, + -0.6015625, + -1.515625, + -1.5, + 0.369140625, + 0.232421875, + 1.8828125, + -1.4921875, + 1.0625, + -1.515625, + -0.486328125, + 0.034423828125, + -1.234375, + -1.796875, + -1.546875, + 0.6015625, + -2.109375, + 1.84375, + -2.25, + -1.2421875, + 0.5078125, + 0.439453125, + -1.015625, + 1.1796875, + 0.98828125, + -2.375, + -1.3125, + -0.462890625, + -2.671875, + -2.65625, + -1.6328125, + 0.3828125, + -3.078125, + 2.53125, + 0.98046875, + 2.34375, + 3.78125, + 1.7578125, + 3.25, + 1.9296875, + -0.353515625, + -1.03125, + -2.46875, + 2.125, + 1.609375, + 1.6015625, + -0.41015625, + 3.75, + -1.1953125, + 0.333984375, + 2.03125, + 0.77734375, + -3.421875, + 4.625, + -2.40625, + -0.4453125, + -4.71875, + -3.046875, + -3.015625, + 0.40625, + 2.0625, + 0.59375, + -2.015625, + 0.1396484375, + -0.98828125, + 0.263671875, + -2.5625, + 2.4375, + 0.466796875, + 1.765625, + -0.71875, + 1.296875, + -3.484375, + 1.71875, + 1.0234375, + 1.046875, + 
-1.4765625, + -1.1640625, + 2.125, + 0.0, + 3.109375, + -0.11767578125, + 0.6640625, + -0.75390625, + 0.7109375, + -1.8359375, + -2.890625, + 1.75, + -1.4765625, + 3.0625, + -3.828125, + 1.5, + 0.1376953125, + -1.296875, + 1.9765625, + -0.5703125, + 0.78515625, + 1.0703125, + -2.4375, + 0.0732421875, + 1.546875, + 0.014892578125, + -2.4375, + -1.84375, + -2.453125, + -1.09375, + -0.6484375, + -1.0546875, + 0.75390625, + -4.28125, + -0.53125, + 3.4375, + 2.546875, + -1.6875, + 1.5703125, + -2.4375, + 1.5546875, + -2.140625, + -0.9765625, + 2.265625, + -2.75, + 0.68359375, + -0.00946044921875, + -0.70703125, + -2.59375, + -1.65625, + -0.091796875, + 1.125, + 1.6484375, + -0.76953125, + 0.90234375, + -0.7265625, + 2.6875, + 0.234375, + -0.96875, + -0.87109375, + -4.25, + -0.32421875, + -0.49609375, + 1.4140625, + -3.984375, + -2.375, + 0.9765625, + 1.1640625, + -0.74609375, + -1.0078125, + 0.55078125, + 0.73046875, + -2.171875, + -0.91015625, + -1.890625, + -0.1728515625, + -1.21875, + 2.046875, + -5.1875, + 1.3984375, + 2.09375, + -1.1953125, + -1.09375, + 0.5546875, + 0.2333984375, + 1.765625, + -2.359375, + 1.8359375, + 0.6953125, + -2.765625, + -1.8203125, + -1.6640625, + 2.5625, + 1.9453125, + 0.73046875, + -2.25, + 0.00555419921875, + -0.1728515625, + 2.171875, + -1.6484375, + -1.578125, + -0.453125, + 4.1875, + -4.625, + 1.0546875, + 1.6484375, + -1.46875, + 1.171875, + 1.515625, + -2.828125, + -1.8671875, + 0.53125, + -1.1171875, + 0.51171875, + 2.65625, + 1.484375, + -1.7734375, + 0.021484375, + -0.29296875, + -0.03466796875, + 0.609375, + 1.609375, + 0.78125, + -2.625, + 0.68359375, + 3.0, + 2.34375, + 0.4609375, + 0.1650390625, + -1.8984375, + 0.3046875, + 0.314453125, + -2.4375, + -4.875, + -1.1484375, + -2.25, + 1.015625, + 0.890625, + 0.10791015625, + -2.125, + 3.28125, + 0.81640625, + -3.9375, + 4.59375, + 0.8359375, + 1.5390625, + 5.15625, + 0.52734375, + 0.51953125, + 2.46875, + -0.4296875, + -0.94921875, + -4.75, + -2.59375, + 1.34375, + -1.3984375, 
+ 0.69140625, + -0.439453125, + 1.7578125, + -4.15625, + -1.6875, + 1.8671875, + -0.5, + -0.6328125, + -2.0, + 0.43359375, + 2.609375, + 0.76953125, + -2.96875, + -3.390625, + -3.046875, + -0.62109375, + -0.283203125, + -2.109375, + 0.365234375, + -1.65625, + -1.9609375, + -1.2890625, + 0.0, + -3.390625, + 0.494140625, + 0.453125, + 2.390625, + -0.384765625, + 1.453125, + -1.296875, + -0.0419921875, + -2.140625, + -0.6171875, + -1.125, + -0.51953125, + -4.3125, + -1.28125, + -4.34375, + -0.9375, + 4.625, + -0.345703125, + 0.6015625, + -0.3125, + 1.40625, + 0.671875, + 0.8671875, + -0.28125, + -0.1328125, + -0.10107421875, + -2.109375, + 3.796875, + 0.75, + -2.8125, + 2.078125, + -1.6015625, + 2.953125, + 2.453125, + 1.4375, + 1.0, + 2.59375, + -0.353515625, + 2.15625, + -0.66796875, + -2.875, + 1.1640625, + 0.71875, + -0.408203125, + -1.171875, + 2.9375, + -0.69140625, + -0.0869140625, + 1.5, + 1.625, + 1.6171875, + -3.84375, + -1.1875, + -0.6484375, + 0.78515625, + -3.015625, + -2.421875, + 2.984375, + 2.234375, + -0.83984375, + -0.1982421875, + 0.54296875, + -0.5859375, + 3.453125, + -2.921875, + 1.3984375, + -1.203125, + -1.4140625, + 1.53125, + -1.75, + -0.8984375, + -0.341796875, + 1.0234375, + -2.5, + 2.28125, + -0.61328125, + 0.24609375, + 0.5859375, + 0.734375, + -0.5859375, + 1.015625, + 0.470703125, + 1.109375, + -1.609375, + 3.3125, + 1.609375, + 1.421875, + 0.3359375, + -2.453125, + -0.025634765625, + 0.7890625, + 2.125, + -1.09375, + 3.734375, + -0.1005859375, + 0.08544921875, + 1.7265625, + 1.0, + 1.7890625, + 4.625, + -2.59375, + -0.2255859375, + -2.515625, + 2.015625, + -0.55859375, + 1.0, + -0.859375, + -1.359375, + -1.2578125, + 0.70703125, + -1.1015625, + -0.828125, + -0.3125, + 1.171875, + 1.4921875, + 2.671875, + 0.80078125, + 1.625, + -0.96484375, + -2.09375, + 1.1796875, + -0.1865234375, + 1.71875, + 2.71875, + -0.83984375, + 1.6953125, + -0.53515625, + -0.10107421875, + -0.609375, + 3.203125, + -0.0186767578125, + -1.984375, + -2.515625, + 
1.4453125, + -2.5, + 0.92578125, + -0.9296875, + -0.1669921875, + 3.96875, + -0.80859375, + 1.5390625, + 6.4375, + -2.3125, + 0.255859375, + 2.46875, + -1.6171875, + 3.921875, + -0.050537109375, + -0.75, + 1.1796875, + -0.76953125, + 2.296875, + 0.216796875, + 0.1806640625, + 2.96875, + 0.38671875, + 0.03173828125, + -3.59375, + 2.015625, + -3.046875, + -3.390625, + 0.328125, + -1.1015625, + -0.25, + -0.73046875, + 3.125, + -5.59375, + -3.734375, + -1.8359375, + 0.859375, + -1.203125, + 1.484375, + -3.59375, + 2.09375, + -1.515625, + -2.046875, + -3.1875, + -0.056884765625, + -1.3359375, + 0.94921875, + -0.388671875, + 0.80078125, + -1.6015625, + -2.0, + -0.828125, + 1.109375, + -1.4921875, + 1.1875, + 0.337890625, + 2.65625, + 0.68359375, + 1.953125, + -0.77734375, + 3.4375, + 0.72265625, + 2.8125, + 1.484375, + 1.40625, + 0.703125, + -1.40625, + -0.1884765625, + -2.5625, + 0.2265625, + -1.125, + -2.515625, + -1.2890625, + -0.97265625, + -1.765625, + -0.26171875, + 3.890625, + -1.8125, + -1.140625, + -3.03125, + 0.044189453125, + 3.109375, + -0.96875, + -0.30859375, + 2.171875, + -0.3828125, + 0.60546875, + 4.84375, + -1.671875, + -1.59375, + -1.2734375, + 2.15625, + -2.40625, + -2.59375, + 1.1171875, + -0.60546875, + 0.796875, + 2.15625, + -1.0234375, + -8.0, + -1.7265625, + 3.21875, + 3.40625, + -2.59375, + 0.87109375, + -1.4296875, + 0.34765625, + -4.3125, + 0.86328125, + -2.953125, + 3.015625, + -0.74609375, + 0.431640625, + 0.2109375, + 0.69140625, + -2.859375, + -1.109375, + 0.15234375, + -0.016357421875, + 8.5, + 1.9453125, + -1.0390625, + 0.294921875, + 0.298828125, + 0.5546875, + 3.28125, + -2.546875, + 2.40625, + -1.1875, + 2.703125, + 1.78125, + 1.015625, + 1.78125, + 1.65625, + 0.50390625, + 1.5546875, + -0.2421875, + -1.375, + -1.3671875, + 0.87109375, + 1.8984375, + -0.08154296875, + -1.328125, + 1.671875, + 2.90625, + -0.828125, + 0.69921875, + -3.90625, + 2.125, + 0.59375, + -0.126953125, + 1.34375, + 0.337890625, + 0.380859375, + 0.25390625, + 
1.2734375, + 8.3125, + 2.09375, + -2.6875, + 2.234375, + -0.62109375, + 3.140625, + -0.232421875, + 1.625, + -1.0390625, + -0.021728515625, + 2.90625, + 1.203125, + -0.298828125, + -0.69921875, + 0.10400390625, + 3.359375, + -1.859375, + -0.85546875, + -0.5625, + -1.6640625, + -2.25, + -1.4140625, + 2.640625, + -1.21875, + 0.1494140625, + -1.46875, + 2.078125, + -3.796875, + 3.90625, + -1.796875, + 0.83203125, + -2.671875, + 0.294921875, + -2.53125, + -0.24609375, + -0.216796875, + -1.171875, + 1.4765625, + 5.40625, + 1.7421875, + -3.703125, + -0.859375, + 1.2734375, + 1.7734375, + -1.421875, + 1.1953125, + -1.46875, + -2.15625, + 1.21875, + -2.671875, + -0.271484375, + -1.28125, + 5.375, + -2.015625, + -0.98828125, + 0.7734375, + 1.5, + -1.0234375, + 1.578125, + 4.03125, + 0.515625, + -1.4765625, + 1.421875, + 0.82421875, + -0.390625, + 0.76171875, + -1.9453125, + 2.9375, + -0.65625, + 0.1533203125, + -2.75, + -1.0625, + 0.296875, + -1.9140625, + -0.9375, + 0.953125, + -1.21875, + 3.15625, + 1.4140625, + -0.93359375, + 0.0693359375, + -3.859375, + 2.171875, + -1.3671875, + -2.453125, + -0.51171875, + 2.203125, + -0.90234375, + 0.361328125, + -1.1796875, + 0.2333984375, + -2.078125, + 1.6328125, + 0.28515625, + -3.6875, + 1.1328125, + -1.6015625, + -0.2177734375, + -0.2412109375, + -0.85546875, + 0.72265625, + 1.2265625, + -9.4375, + -1.203125, + 0.390625, + -4.5625, + -0.2216796875, + 0.77734375, + -2.453125, + -1.578125, + -0.0296630859375, + 2.015625, + 1.96875, + 3.6875, + 3.578125, + 2.171875, + 3.1875, + 2.375, + 2.03125, + -0.59765625, + 0.95703125, + -0.07470703125, + 0.169921875, + 1.6015625, + 0.1435546875, + -0.408203125, + -2.734375, + -1.75, + -0.439453125, + -0.84375, + -0.33984375, + 0.04736328125, + -0.96484375, + -0.2470703125, + -1.8671875, + 1.828125, + -1.296875, + -1.203125, + -0.46484375, + -3.703125, + -0.44140625, + -1.0234375, + -0.078125, + 2.828125, + 4.53125, + -0.984375, + 3.125, + 2.671875, + -0.41015625, + 2.390625, + 0.279296875, + 
5.28125, + -0.38671875, + -1.1640625, + -2.28125, + -0.96875, + -0.4375, + -3.734375, + -0.734375, + 1.03125, + 1.7734375, + -1.171875, + -1.2265625, + 0.046630859375, + 0.62109375, + 4.21875, + -0.546875, + 2.125, + -0.11328125, + 1.7421875, + -0.5078125, + 0.291015625, + -1.84375, + -0.2021484375, + 0.162109375, + 2.65625, + 0.2431640625, + -3.09375, + 0.5, + 0.16015625, + 0.169921875, + -4.96875, + -5.8125, + -2.671875, + -3.40625, + 1.5078125, + 1.46875, + 0.8984375, + 3.21875, + -1.9453125, + -0.14453125, + 3.34375, + 0.2451171875, + -1.515625, + 1.0078125, + -3.046875, + 1.09375, + 1.125, + -1.8984375, + 1.796875, + 0.1337890625, + 2.21875, + -0.1865234375, + 0.048583984375, + 2.65625, + -2.046875, + -1.1796875, + 1.3828125, + -3.28125, + -3.78125, + 1.375, + -0.435546875, + 0.72265625, + 1.703125, + -1.59375, + 3.625, + 1.9140625, + -3.390625, + -0.26171875, + -1.2734375, + 1.3984375, + 1.90625, + -2.671875, + 2.125, + -1.2734375, + -1.765625, + 1.6484375, + 0.52734375, + 1.5234375, + -5.28125, + -0.375, + 1.7734375, + 2.6875, + 1.515625, + -1.625, + 0.81640625, + -1.0390625, + -1.90625, + -0.1494140625, + -2.34375, + 1.3046875, + 0.400390625, + 0.44921875, + 3.125, + -0.5078125, + -3.0, + 2.015625, + 1.5703125, + 3.203125, + 1.0390625, + -0.921875, + 2.265625, + 2.078125, + 0.384765625, + -0.71484375, + 1.59375, + -2.140625, + 4.78125, + 1.09375, + -0.193359375, + -0.89453125, + -2.171875, + 2.703125, + 0.25390625, + -0.50390625, + 0.28125, + 0.8828125, + -1.1953125, + 2.921875, + 0.181640625, + -0.515625, + -1.5390625, + 1.6953125, + -5.5, + 2.046875, + 0.51171875, + -4.34375, + 2.4375, + 1.7265625, + 3.25, + -0.65625, + 0.83984375, + -0.5546875, + 1.6796875, + -0.98828125, + -0.03369140625, + -0.05712890625, + 10.875, + 2.796875, + 1.71875, + 1.6171875, + -1.25, + -0.39453125, + -4.25, + -0.1640625, + 1.78125, + -1.4453125, + -0.87109375, + -0.5078125, + -2.234375, + 2.796875, + 1.6328125, + -1.859375, + -0.15234375, + -0.78515625, + 2.328125, + 2.359375, 
+ -3.515625, + -2.6875, + -0.91015625, + 3.984375, + -2.765625, + 0.8125, + -2.4375, + -3.3125, + -0.015869140625, + -0.2373046875, + 1.671875, + -3.28125, + -0.734375, + 0.2333984375, + -0.87109375, + 0.30859375, + 1.46875, + 4.125, + 1.625, + 2.40625, + -2.0625, + -1.4609375, + 0.7421875, + 0.51953125, + -2.078125, + -0.0106201171875, + -1.8984375, + -2.546875, + -0.91015625, + 2.09375, + 1.7421875, + -2.484375, + 1.1875, + -1.046875, + 3.34375, + 1.1875, + -0.1376953125, + 0.78515625, + 1.8984375, + -2.046875, + -0.44140625, + -0.053955078125, + 5.125, + 2.6875, + -2.328125, + 0.359375, + -0.41796875, + 0.78125, + 4.78125, + 2.265625, + 2.484375, + 2.4375, + -0.302734375, + -0.71875, + -0.392578125, + 0.984375, + 0.734375, + 0.490234375, + 1.796875, + 1.4609375, + 0.298828125, + -1.328125, + 1.984375, + -1.109375, + -2.578125, + 2.484375, + 0.75, + -0.30078125, + 1.5078125, + 1.3203125, + 0.359375, + 0.6875, + -0.63671875, + -1.53125, + -2.1875, + -0.66796875, + 2.078125, + -0.9375, + 1.3984375, + -0.83203125, + -2.015625, + 2.484375, + 2.09375, + 3.0625, + 1.265625, + -0.79296875, + -0.18359375, + 0.27734375, + 0.88671875, + 1.9453125, + -0.076171875, + 2.71875, + -1.3125, + -0.86328125, + -0.09423828125, + 0.86328125, + -1.140625, + -0.16796875, + -4.9375, + 0.470703125, + 3.9375, + 1.3203125, + -0.06591796875, + 1.6171875, + -0.2265625, + -0.69140625, + 1.1328125, + -1.2265625, + 1.7734375, + -0.455078125, + 1.2890625, + -2.6875, + 2.421875, + 2.109375, + -0.375, + 0.6796875, + -3.546875, + 0.93359375, + -0.462890625, + 1.3671875, + 1.0625, + 0.384765625, + -1.5, + 1.1328125, + -1.515625, + 3.828125, + 0.859375, + -0.83984375, + 0.3125, + -1.265625, + 0.005401611328125, + -0.1591796875, + -3.078125, + 0.296875, + 0.4921875, + -1.4375, + 0.0294189453125, + 1.90625, + -0.298828125, + -0.609375, + -1.1640625, + 1.4375, + -0.94140625, + -7.21875, + 1.5546875, + -0.31640625, + 0.2060546875, + -0.65625, + -0.349609375, + 0.01153564453125, + 0.72265625, + 
-0.263671875, + 1.6484375, + 1.453125, + 1.546875, + 0.35546875, + -1.828125, + -1.921875, + 1.0859375, + 3.578125, + -0.02734375, + 0.90234375, + -0.06982421875, + -4.53125, + 1.296875, + 0.2578125, + -0.50390625, + -1.171875, + 4.46875, + -0.08203125, + 2.34375, + -1.9921875, + 2.171875, + 0.267578125, + 1.0625, + -2.828125, + 1.5703125, + -1.8671875, + 1.296875, + -3.234375, + -2.453125, + -2.859375, + -0.7109375, + 2.25, + 0.875, + 0.6953125, + 0.3359375, + -1.3984375, + 2.765625, + 1.8125, + 1.53125, + -1.59375, + -0.240234375, + -2.21875, + 1.75, + 0.158203125, + 2.859375, + 1.953125, + -0.86328125, + -0.98828125, + -1.2109375, + -4.25, + -1.2265625, + -0.7265625, + 0.26953125, + 0.076171875, + -0.48828125, + -1.640625, + 3.140625, + 1.75, + -0.54296875, + 0.81640625, + 1.421875, + 1.34375, + -2.5625, + 3.1875, + -1.28125, + 1.28125, + 0.0242919921875, + -1.2421875, + -2.453125, + -0.86328125, + 2.46875, + 2.5625, + -0.0262451171875, + 0.7890625, + -1.2109375, + -3.40625, + 2.59375, + 1.5859375, + 2.1875, + 1.5703125, + 1.5234375, + 0.7890625, + 2.5625, + 3.171875, + -1.90625, + 1.7578125, + 0.484375, + 1.7109375, + -0.6796875, + 3.59375, + 1.0859375, + -0.042236328125, + -3.5625, + -0.298828125, + 2.125, + -2.40625, + 2.171875, + 0.1298828125, + -0.1982421875, + 4.34375, + -2.0625, + 4.28125, + 2.578125, + -1.140625, + -2.875, + -0.06689453125, + 0.478515625, + -1.4140625, + 2.21875, + -1.875, + 1.3671875, + -2.390625, + 0.76171875, + 2.671875, + -2.46875, + 0.85546875, + -2.875, + -3.796875, + -0.85546875, + -1.796875, + 0.75390625, + -0.81640625, + -1.1875, + -2.109375, + 1.9140625, + 2.9375, + -0.90625, + 0.345703125, + -1.4609375, + 0.0986328125, + 0.1689453125, + -2.046875, + 0.04541015625, + 1.859375, + -1.703125, + 0.4140625, + 2.875, + -0.053955078125, + -0.87890625, + 6.1875, + -6.40625, + -0.875, + -0.7890625, + 0.33984375, + -1.515625, + 0.33203125, + -0.19140625, + -2.765625, + -0.01123046875, + -1.703125, + 0.8125, + -0.037841796875, + 1.578125, 
+ 0.032470703125, + -0.50390625, + -2.109375, + 0.1376953125, + -2.625, + 2.28125, + -0.6328125, + 0.474609375, + 1.5546875, + -0.138671875, + -0.96484375, + -1.3203125, + -2.609375, + -1.2734375, + 1.2890625, + -1.7734375, + 0.376953125, + 0.7109375, + 1.25, + -3.84375, + 0.6796875, + 0.70703125, + -0.94921875, + 0.74609375, + 2.4375, + 0.1669921875, + 0.1904296875, + 1.484375, + 1.5234375, + 0.6015625, + -0.64453125, + -0.6640625, + -1.3359375, + -1.25, + 1.515625, + 0.462890625, + 0.1640625, + -0.63671875, + 3.46875, + -1.1640625, + -3.6875, + -0.490234375, + -2.015625, + -3.515625, + -1.859375, + 1.6171875, + -2.5625, + 2.375, + -0.65234375, + 3.671875, + -1.8359375, + -0.58203125, + 0.93359375, + 0.294921875, + 0.3984375, + -0.049560546875, + -1.7890625, + -0.171875, + -2.09375, + -2.953125, + 1.3828125, + 1.7578125, + 0.6484375, + -2.625, + 0.453125, + 1.921875, + 2.296875, + 0.419921875, + -1.1796875, + 2.796875, + -0.58203125, + 1.953125, + -1.6171875, + 1.0390625, + 1.25, + 0.9921875, + 2.234375, + 1.15625, + -0.2099609375, + 0.72265625, + 0.8828125, + -0.69921875, + 2.078125, + -4.125, + -0.984375, + 1.6015625, + 1.0, + 0.419921875, + -3.296875, + -0.765625, + -1.3515625, + 1.8828125, + 0.4609375, + -0.8046875, + 1.4765625, + 0.490234375, + -4.1875, + 1.3515625, + -3.1875, + -1.9765625, + 0.1328125, + 3.59375, + -1.1875, + -3.359375, + 2.34375, + 0.326171875, + 1.21875, + -0.91796875, + 1.140625, + 0.37109375, + -0.2080078125, + 1.0078125, + 0.98828125, + -2.625, + -1.125, + -0.5703125, + 2.671875, + -2.328125, + 0.734375, + 0.90234375, + 2.078125, + 1.140625, + -0.154296875, + -1.25, + 0.7734375, + -2.578125, + 2.875, + -0.46875, + -4.90625, + -0.93359375, + -2.890625, + 3.328125, + -2.84375, + -1.65625, + 3.109375, + 2.46875, + -2.609375, + -0.77734375, + 4.3125, + 2.390625, + -3.109375, + 0.76953125, + 3.609375, + -0.8828125, + 0.72265625, + -1.9296875, + -0.037353515625, + 0.94921875, + -0.349609375, + 0.46484375, + 0.373046875, + 1.984375, + 
-0.9453125, + 1.3671875, + 0.5078125, + 2.625, + -1.3125, + -0.029296875, + -0.7109375, + 1.3046875, + -0.75390625, + 0.37890625, + 0.064453125, + 1.4453125, + -0.20703125, + 1.921875, + -1.203125, + 3.34375, + 2.15625, + -1.0078125, + -0.46484375, + 0.1806640625, + -2.59375, + 0.302734375, + 1.1953125, + 2.40625, + -0.87109375, + -0.2373046875, + -1.828125, + -3.25, + 0.5390625, + -2.109375, + 0.25390625, + -2.484375, + -2.140625, + 0.02099609375, + -1.671875, + -0.06640625, + 2.453125, + -1.0234375, + -0.03125, + 2.234375, + -0.03271484375, + -0.83984375, + 0.2197265625, + 0.8125, + 0.2578125, + 0.52734375, + 0.53515625, + -9.5, + -1.359375, + 3.984375, + -3.375, + -2.6875, + 1.5625, + -1.9609375, + 3.109375, + -2.40625, + -1.0, + -11.0625, + -1.75, + -1.7109375, + -2.0625, + 0.703125, + -0.7734375, + 2.21875, + 0.57421875, + 0.61328125, + 0.96484375, + -0.2080078125, + -2.703125, + 1.9453125, + 0.046142578125, + -1.625, + -0.359375, + -0.90625, + -2.15625, + -1.0625, + -0.7734375, + -3.21875, + 1.8671875, + -0.11181640625, + -0.90625, + 0.8515625, + -2.578125, + -2.375, + 0.578125, + -2.4375, + 1.25, + 1.8203125, + 1.1015625, + 2.234375, + 11.75, + -0.66796875, + 1.5859375, + 0.064453125, + -0.99609375, + 2.6875, + -0.66796875, + -0.0537109375, + 2.1875, + 3.453125, + 1.0078125, + -0.4609375, + 0.5390625, + -1.53125, + -0.07373046875, + -1.7421875, + 1.2265625, + 0.57421875, + -4.875, + -0.8046875, + -0.2236328125, + 3.578125, + 2.21875, + 0.515625, + 0.8515625, + 0.388671875, + -0.357421875, + 0.359375, + 0.8984375, + 1.09375, + -1.4375, + 1.3125, + 1.640625, + 3.671875, + -1.0, + 0.013916015625, + -3.40625, + 1.0625, + 2.78125, + 2.265625, + 2.28125, + -1.0625, + 3.265625, + -0.86328125, + 1.03125, + -0.138671875, + -2.875, + -1.90625, + 3.875, + 1.1875, + -4.03125, + 2.390625, + -0.6171875, + 0.068359375, + 0.51953125, + -0.99609375, + -1.90625, + -0.66015625, + -2.0625, + -0.23046875, + 0.7421875, + -2.375, + 0.59375, + -3.46875, + 0.796875, + -2.46875, + 
2.5, + -4.03125, + 0.95703125, + -1.0078125, + 0.0673828125, + -0.353515625, + -2.4375, + 4.375, + 4.46875, + -0.3828125, + 2.828125, + -0.88671875, + 7.40625, + 0.8828125, + 2.921875, + -1.2265625, + 4.09375, + 3.515625, + -2.59375, + 0.251953125, + -2.34375, + -2.21875, + -3.390625, + -1.4375, + -2.484375, + -1.6171875, + 1.3359375, + 2.5, + -1.796875, + 3.484375, + 0.5703125, + -1.9375, + -2.515625, + 0.5390625, + 2.15625, + 0.6328125, + 0.5078125, + -0.94921875, + -0.11328125, + 0.287109375, + -0.9375, + 0.984375, + 0.0, + 0.53125, + 1.390625, + -1.578125, + 1.3828125, + 0.546875, + 1.0390625, + 3.203125, + 6.3125, + -2.625, + -2.015625, + -2.515625, + -0.98046875, + -2.671875, + -2.421875, + 0.97265625, + -0.59375, + 1.28125, + -2.53125, + 0.7421875, + 2.3125, + -5.375, + -2.3125, + -2.40625, + -0.376953125, + -0.9296875, + 1.9375, + -1.9921875, + 0.25, + -0.369140625, + 7.3125, + 0.54296875, + -0.921875, + -2.6875, + -4.90625, + -0.96875, + -1.046875, + 2.03125, + -0.41015625, + 2.1875, + 2.453125, + 2.625, + 0.51953125, + -0.82421875, + -1.40625, + -0.259765625, + -0.044189453125, + 2.515625, + -0.408203125, + -0.8671875, + -2.046875, + -0.051513671875, + -2.828125, + 1.1953125, + 0.953125, + 2.859375, + -0.65234375, + -4.78125, + 1.6953125, + 1.3671875, + -0.396484375, + 1.84375, + -0.375, + 1.0, + -0.130859375, + -1.46875, + -0.275390625 + ], + "index": 0, + "object": "embedding", + "raw_output": null + } + ], + "model": "accounts/fireworks/models/qwen3-embedding-8b", + "object": "list", + "usage": { + "prompt_tokens": 5, + "total_tokens": 5, + "completion_tokens": 0 + }, + "perf_metrics": null + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/fc0662299704.json b/tests/integration/recordings/responses/fc0662299704.json new file mode 100644 index 000000000..d025c84de --- /dev/null +++ b/tests/integration/recordings/responses/fc0662299704.json @@ -0,0 +1,415 @@ +{ + "request": { + "method": "POST", + "url": 
"http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant Always respond with tool calls no matter what. " + }, + { + "role": "user", + "content": "Get the boiling point of polyjuice with a tool call." + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_nhfpubt2", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\": \"true\", \"liquid_name\": \"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_nhfpubt2", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius", + "default": true + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", 
+ "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": 
null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-462", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null 
+ } + ], + "created": 1759427017, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} From 0beb61b13b15bc9cb6ed607bddad77621056e6c0 Mon Sep 17 00:00:00 2001 From: Ashwin Bharambe Date: Thu, 2 Oct 2025 13:03:17 -0700 Subject: [PATCH 08/13] fix(api): fix a mistake from #3636 which overwrote POST /responses --- docs/docs/providers/agents/index.mdx | 4 +- docs/docs/providers/batches/index.mdx | 24 +++++------ docs/docs/providers/inference/index.mdx | 12 +++--- docs/static/deprecated-llama-stack-spec.html | 42 +++++--------------- docs/static/deprecated-llama-stack-spec.yaml | 34 ++++------------ docs/static/llama-stack-spec.html | 42 +++++--------------- docs/static/llama-stack-spec.yaml | 34 ++++------------ docs/static/stainless-llama-stack-spec.html | 42 +++++--------------- docs/static/stainless-llama-stack-spec.yaml | 34 ++++------------ llama_stack/apis/agents/agents.py | 2 +- 10 files changed, 75 insertions(+), 195 deletions(-) diff --git a/docs/docs/providers/agents/index.mdx b/docs/docs/providers/agents/index.mdx index 06eb104af..52b92734e 100644 --- a/docs/docs/providers/agents/index.mdx +++ b/docs/docs/providers/agents/index.mdx @@ -1,7 +1,7 @@ --- description: "Agents - APIs for creating and interacting with agentic systems." +APIs for creating and interacting with agentic systems." sidebar_label: Agents title: Agents --- @@ -12,6 +12,6 @@ title: Agents Agents - APIs for creating and interacting with agentic systems. +APIs for creating and interacting with agentic systems. This section contains documentation for all available providers for the **agents** API. 
diff --git a/docs/docs/providers/batches/index.mdx b/docs/docs/providers/batches/index.mdx index 2c64b277f..18e5e314d 100644 --- a/docs/docs/providers/batches/index.mdx +++ b/docs/docs/providers/batches/index.mdx @@ -1,14 +1,14 @@ --- description: "The Batches API enables efficient processing of multiple requests in a single operation, - particularly useful for processing large datasets, batch evaluation workflows, and - cost-effective inference at scale. +particularly useful for processing large datasets, batch evaluation workflows, and +cost-effective inference at scale. - The API is designed to allow use of openai client libraries for seamless integration. +The API is designed to allow use of openai client libraries for seamless integration. - This API provides the following extensions: - - idempotent batch creation +This API provides the following extensions: + - idempotent batch creation - Note: This API is currently under active development and may undergo changes." +Note: This API is currently under active development and may undergo changes." sidebar_label: Batches title: Batches --- @@ -18,14 +18,14 @@ title: Batches ## Overview The Batches API enables efficient processing of multiple requests in a single operation, - particularly useful for processing large datasets, batch evaluation workflows, and - cost-effective inference at scale. +particularly useful for processing large datasets, batch evaluation workflows, and +cost-effective inference at scale. - The API is designed to allow use of openai client libraries for seamless integration. +The API is designed to allow use of openai client libraries for seamless integration. - This API provides the following extensions: - - idempotent batch creation +This API provides the following extensions: + - idempotent batch creation - Note: This API is currently under active development and may undergo changes. +Note: This API is currently under active development and may undergo changes. 
This section contains documentation for all available providers for the **batches** API. diff --git a/docs/docs/providers/inference/index.mdx b/docs/docs/providers/inference/index.mdx index ebbaf1be1..1dc479675 100644 --- a/docs/docs/providers/inference/index.mdx +++ b/docs/docs/providers/inference/index.mdx @@ -1,9 +1,9 @@ --- description: "Llama Stack Inference API for generating completions, chat completions, and embeddings. - This API provides the raw interface to the underlying models. Two kinds of models are supported: - - LLM models: these models generate \"raw\" and \"chat\" (conversational) completions. - - Embedding models: these models generate embeddings to be used for semantic search." +This API provides the raw interface to the underlying models. Two kinds of models are supported: +- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions. +- Embedding models: these models generate embeddings to be used for semantic search." sidebar_label: Inference title: Inference --- @@ -14,8 +14,8 @@ title: Inference Llama Stack Inference API for generating completions, chat completions, and embeddings. - This API provides the raw interface to the underlying models. Two kinds of models are supported: - - LLM models: these models generate "raw" and "chat" (conversational) completions. - - Embedding models: these models generate embeddings to be used for semantic search. +This API provides the raw interface to the underlying models. Two kinds of models are supported: +- LLM models: these models generate "raw" and "chat" (conversational) completions. +- Embedding models: these models generate embeddings to be used for semantic search. This section contains documentation for all available providers for the **inference** API. 
diff --git a/docs/static/deprecated-llama-stack-spec.html b/docs/static/deprecated-llama-stack-spec.html index 99ce8ee9c..fe63f78bc 100644 --- a/docs/static/deprecated-llama-stack-spec.html +++ b/docs/static/deprecated-llama-stack-spec.html @@ -2089,11 +2089,16 @@ "post": { "responses": { "200": { - "description": "A ListOpenAIResponseObject.", + "description": "An OpenAIResponseObject.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListOpenAIResponseObject" + "$ref": "#/components/schemas/OpenAIResponseObject" + } + }, + "text/event-stream": { + "schema": { + "$ref": "#/components/schemas/OpenAIResponseObjectStream" } } } @@ -2114,14 +2119,14 @@ "tags": [ "Agents" ], - "summary": "List all OpenAI responses.", - "description": "List all OpenAI responses.", + "summary": "Create a new OpenAI response.", + "description": "Create a new OpenAI response.", "parameters": [], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListOpenaiResponsesRequest" + "$ref": "#/components/schemas/CreateOpenaiResponseRequest" } } }, @@ -10908,33 +10913,6 @@ ], "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching" }, - "ListOpenaiResponsesRequest": { - "type": "object", - "properties": { - "after": { - "type": "string", - "description": "The ID of the last response to return." - }, - "limit": { - "type": "integer", - "description": "The number of responses to return." - }, - "model": { - "type": "string", - "description": "The model to filter responses by." - }, - "order": { - "type": "string", - "enum": [ - "asc", - "desc" - ], - "description": "The order to sort responses by when sorted by created_at ('asc' or 'desc')." 
- } - }, - "additionalProperties": false, - "title": "ListOpenaiResponsesRequest" - }, "OpenAIDeleteResponseObject": { "type": "object", "properties": { diff --git a/docs/static/deprecated-llama-stack-spec.yaml b/docs/static/deprecated-llama-stack-spec.yaml index d2e595b5d..9b1d3eff6 100644 --- a/docs/static/deprecated-llama-stack-spec.yaml +++ b/docs/static/deprecated-llama-stack-spec.yaml @@ -1529,11 +1529,14 @@ paths: post: responses: '200': - description: A ListOpenAIResponseObject. + description: An OpenAIResponseObject. content: application/json: schema: - $ref: '#/components/schemas/ListOpenAIResponseObject' + $ref: '#/components/schemas/OpenAIResponseObject' + text/event-stream: + schema: + $ref: '#/components/schemas/OpenAIResponseObjectStream' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -1546,14 +1549,14 @@ paths: $ref: '#/components/responses/DefaultError' tags: - Agents - summary: List all OpenAI responses. - description: List all OpenAI responses. + summary: Create a new OpenAI response. + description: Create a new OpenAI response. parameters: [] requestBody: content: application/json: schema: - $ref: '#/components/schemas/ListOpenaiResponsesRequest' + $ref: '#/components/schemas/CreateOpenaiResponseRequest' required: true deprecated: true /v1/openai/v1/responses/{response_id}: @@ -8150,27 +8153,6 @@ components: - type title: >- OpenAIResponseObjectStreamResponseWebSearchCallSearching - ListOpenaiResponsesRequest: - type: object - properties: - after: - type: string - description: The ID of the last response to return. - limit: - type: integer - description: The number of responses to return. - model: - type: string - description: The model to filter responses by. - order: - type: string - enum: - - asc - - desc - description: >- - The order to sort responses by when sorted by created_at ('asc' or 'desc'). 
- additionalProperties: false - title: ListOpenaiResponsesRequest OpenAIDeleteResponseObject: type: object properties: diff --git a/docs/static/llama-stack-spec.html b/docs/static/llama-stack-spec.html index 3da721a4e..fa16e62ee 100644 --- a/docs/static/llama-stack-spec.html +++ b/docs/static/llama-stack-spec.html @@ -1310,11 +1310,16 @@ "post": { "responses": { "200": { - "description": "A ListOpenAIResponseObject.", + "description": "An OpenAIResponseObject.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListOpenAIResponseObject" + "$ref": "#/components/schemas/OpenAIResponseObject" + } + }, + "text/event-stream": { + "schema": { + "$ref": "#/components/schemas/OpenAIResponseObjectStream" } } } @@ -1335,14 +1340,14 @@ "tags": [ "Agents" ], - "summary": "List all OpenAI responses.", - "description": "List all OpenAI responses.", + "summary": "Create a new OpenAI response.", + "description": "Create a new OpenAI response.", "parameters": [], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListOpenaiResponsesRequest" + "$ref": "#/components/schemas/CreateOpenaiResponseRequest" } } }, @@ -8233,33 +8238,6 @@ ], "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching" }, - "ListOpenaiResponsesRequest": { - "type": "object", - "properties": { - "after": { - "type": "string", - "description": "The ID of the last response to return." - }, - "limit": { - "type": "integer", - "description": "The number of responses to return." - }, - "model": { - "type": "string", - "description": "The model to filter responses by." - }, - "order": { - "type": "string", - "enum": [ - "asc", - "desc" - ], - "description": "The order to sort responses by when sorted by created_at ('asc' or 'desc')." 
- } - }, - "additionalProperties": false, - "title": "ListOpenaiResponsesRequest" - }, "OpenAIDeleteResponseObject": { "type": "object", "properties": { diff --git a/docs/static/llama-stack-spec.yaml b/docs/static/llama-stack-spec.yaml index 3927d3a94..733e2cd21 100644 --- a/docs/static/llama-stack-spec.yaml +++ b/docs/static/llama-stack-spec.yaml @@ -967,11 +967,14 @@ paths: post: responses: '200': - description: A ListOpenAIResponseObject. + description: An OpenAIResponseObject. content: application/json: schema: - $ref: '#/components/schemas/ListOpenAIResponseObject' + $ref: '#/components/schemas/OpenAIResponseObject' + text/event-stream: + schema: + $ref: '#/components/schemas/OpenAIResponseObjectStream' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -984,14 +987,14 @@ paths: $ref: '#/components/responses/DefaultError' tags: - Agents - summary: List all OpenAI responses. - description: List all OpenAI responses. + summary: Create a new OpenAI response. + description: Create a new OpenAI response. parameters: [] requestBody: content: application/json: schema: - $ref: '#/components/schemas/ListOpenaiResponsesRequest' + $ref: '#/components/schemas/CreateOpenaiResponseRequest' required: true deprecated: false /v1/responses/{response_id}: @@ -6196,27 +6199,6 @@ components: - type title: >- OpenAIResponseObjectStreamResponseWebSearchCallSearching - ListOpenaiResponsesRequest: - type: object - properties: - after: - type: string - description: The ID of the last response to return. - limit: - type: integer - description: The number of responses to return. - model: - type: string - description: The model to filter responses by. - order: - type: string - enum: - - asc - - desc - description: >- - The order to sort responses by when sorted by created_at ('asc' or 'desc'). 
- additionalProperties: false - title: ListOpenaiResponsesRequest OpenAIDeleteResponseObject: type: object properties: diff --git a/docs/static/stainless-llama-stack-spec.html b/docs/static/stainless-llama-stack-spec.html index f921d2c29..72ecb5bb5 100644 --- a/docs/static/stainless-llama-stack-spec.html +++ b/docs/static/stainless-llama-stack-spec.html @@ -1310,11 +1310,16 @@ "post": { "responses": { "200": { - "description": "A ListOpenAIResponseObject.", + "description": "An OpenAIResponseObject.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListOpenAIResponseObject" + "$ref": "#/components/schemas/OpenAIResponseObject" + } + }, + "text/event-stream": { + "schema": { + "$ref": "#/components/schemas/OpenAIResponseObjectStream" } } } @@ -1335,14 +1340,14 @@ "tags": [ "Agents" ], - "summary": "List all OpenAI responses.", - "description": "List all OpenAI responses.", + "summary": "Create a new OpenAI response.", + "description": "Create a new OpenAI response.", "parameters": [], "requestBody": { "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListOpenaiResponsesRequest" + "$ref": "#/components/schemas/CreateOpenaiResponseRequest" } } }, @@ -10242,33 +10247,6 @@ ], "title": "OpenAIResponseObjectStreamResponseWebSearchCallSearching" }, - "ListOpenaiResponsesRequest": { - "type": "object", - "properties": { - "after": { - "type": "string", - "description": "The ID of the last response to return." - }, - "limit": { - "type": "integer", - "description": "The number of responses to return." - }, - "model": { - "type": "string", - "description": "The model to filter responses by." - }, - "order": { - "type": "string", - "enum": [ - "asc", - "desc" - ], - "description": "The order to sort responses by when sorted by created_at ('asc' or 'desc')." 
- } - }, - "additionalProperties": false, - "title": "ListOpenaiResponsesRequest" - }, "OpenAIDeleteResponseObject": { "type": "object", "properties": { diff --git a/docs/static/stainless-llama-stack-spec.yaml b/docs/static/stainless-llama-stack-spec.yaml index cb43b313b..151ea1029 100644 --- a/docs/static/stainless-llama-stack-spec.yaml +++ b/docs/static/stainless-llama-stack-spec.yaml @@ -970,11 +970,14 @@ paths: post: responses: '200': - description: A ListOpenAIResponseObject. + description: An OpenAIResponseObject. content: application/json: schema: - $ref: '#/components/schemas/ListOpenAIResponseObject' + $ref: '#/components/schemas/OpenAIResponseObject' + text/event-stream: + schema: + $ref: '#/components/schemas/OpenAIResponseObjectStream' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -987,14 +990,14 @@ paths: $ref: '#/components/responses/DefaultError' tags: - Agents - summary: List all OpenAI responses. - description: List all OpenAI responses. + summary: Create a new OpenAI response. + description: Create a new OpenAI response. parameters: [] requestBody: content: application/json: schema: - $ref: '#/components/schemas/ListOpenaiResponsesRequest' + $ref: '#/components/schemas/CreateOpenaiResponseRequest' required: true deprecated: false /v1/responses/{response_id}: @@ -7641,27 +7644,6 @@ components: - type title: >- OpenAIResponseObjectStreamResponseWebSearchCallSearching - ListOpenaiResponsesRequest: - type: object - properties: - after: - type: string - description: The ID of the last response to return. - limit: - type: integer - description: The number of responses to return. - model: - type: string - description: The model to filter responses by. - order: - type: string - enum: - - asc - - desc - description: >- - The order to sort responses by when sorted by created_at ('asc' or 'desc'). 
- additionalProperties: false - title: ListOpenaiResponsesRequest OpenAIDeleteResponseObject: type: object properties: diff --git a/llama_stack/apis/agents/agents.py b/llama_stack/apis/agents/agents.py index dcd0d83d2..811fe6aa2 100644 --- a/llama_stack/apis/agents/agents.py +++ b/llama_stack/apis/agents/agents.py @@ -816,7 +816,7 @@ class Agents(Protocol): """ ... - @webmethod(route="/openai/v1/responses", method="POST", level=LLAMA_STACK_API_V1, deprecated=True) + @webmethod(route="/openai/v1/responses", method="GET", level=LLAMA_STACK_API_V1, deprecated=True) @webmethod(route="/responses", method="GET", level=LLAMA_STACK_API_V1) async def list_openai_responses( self, From 3df12c69c1db1ba7410a52ccc8573dae256fbf0c Mon Sep 17 00:00:00 2001 From: ehhuang Date: Thu, 2 Oct 2025 13:10:13 -0700 Subject: [PATCH 09/13] chore: fix/add logging categories (#3658) # What does this PR do? These aren't controllable by LLAMA_STACK_LOGGING ``` tests/integration/agents/test_persistence.py::test_delete_agents_and_sessions SKIPPED (This ...) [ 3%] tests/integration/agents/test_persistence.py::test_get_agent_turns_and_steps SKIPPED (This t...) [ 7%] tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=openai/gpt-4o-tools0-True] instantiating llama_stack_client WARNING 2025-10-02 13:14:33,472 root:258 uncategorized: Unknown logging category: testing. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,477 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,960 root:258 uncategorized: Unknown logging category: tokenizer_utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,962 root:258 uncategorized: Unknown logging category: models::llama. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,963 root:258 uncategorized: Unknown logging category: models::llama. 
Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,968 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,974 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:33,978 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,350 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,366 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,489 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,490 root:258 uncategorized: Unknown logging category: inference_store. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,697 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:35,918 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 INFO 2025-10-02 13:14:35,945 llama_stack.providers.utils.inference.inference_store:74 inference_store: Write queue disabled for SQLite to avoid concurrency issues WARNING 2025-10-02 13:14:36,172 root:258 uncategorized: Unknown logging category: files. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,218 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,219 root:258 uncategorized: Unknown logging category: vector_io. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,231 root:258 uncategorized: Unknown logging category: vector_io. 
Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,255 root:258 uncategorized: Unknown logging category: tool_runtime. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,486 root:258 uncategorized: Unknown logging category: responses_store. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,503 root:258 uncategorized: Unknown logging category: openai::responses. Falling back to default 'root' level: 20 INFO 2025-10-02 13:14:36,524 llama_stack.providers.utils.responses.responses_store:80 responses_store: Write queue disabled for SQLite to avoid concurrency issues WARNING 2025-10-02 13:14:36,528 root:258 uncategorized: Unknown logging category: providers::utils. Falling back to default 'root' level: 20 WARNING 2025-10-02 13:14:36,703 root:258 uncategorized: Unknown logging category: uncategorized. Falling back to default 'root' level: 20 ``` ## Test Plan --- llama_stack/log.py | 14 ++++++++++++-- .../meta_reference/responses/openai_responses.py | 2 +- .../providers/utils/inference/inference_store.py | 2 +- .../providers/utils/responses/responses_store.py | 2 +- 4 files changed, 15 insertions(+), 5 deletions(-) diff --git a/llama_stack/log.py b/llama_stack/log.py index 2a11516fa..729b2b8c5 100644 --- a/llama_stack/log.py +++ b/llama_stack/log.py @@ -31,7 +31,14 @@ CATEGORIES = [ "client", "telemetry", "openai_responses", + "testing", + "providers", + "models", + "files", + "vector_io", + "tool_runtime", ] +UNCATEGORIZED = "uncategorized" # Initialize category levels with default level _category_levels: dict[str, int] = dict.fromkeys(CATEGORIES, DEFAULT_LOG_LEVEL) @@ -165,7 +172,7 @@ def setup_logging(category_levels: dict[str, int], log_file: str | None) -> None def filter(self, record): if not hasattr(record, "category"): - record.category = "uncategorized" # Default to 'uncategorized' if no category found + record.category = UNCATEGORIZED # Default to 'uncategorized' if no category found return True # Determine 
the root logger's level (default to WARNING if not specified) @@ -255,7 +262,10 @@ def get_logger( log_level = _category_levels[root_category] else: log_level = _category_levels.get("root", DEFAULT_LOG_LEVEL) - logging.warning(f"Unknown logging category: {category}. Falling back to default 'root' level: {log_level}") + if category != UNCATEGORIZED: + logging.warning( + f"Unknown logging category: {category}. Falling back to default 'root' level: {log_level}" + ) logger.setLevel(log_level) return logging.LoggerAdapter(logger, {"category": category}) diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py index c27dc8467..1a6d75710 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py @@ -41,7 +41,7 @@ from .utils import ( convert_response_text_to_chat_response_format, ) -logger = get_logger(name=__name__, category="openai::responses") +logger = get_logger(name=__name__, category="openai_responses") class OpenAIResponsePreviousResponseWithInputItems(BaseModel): diff --git a/llama_stack/providers/utils/inference/inference_store.py b/llama_stack/providers/utils/inference/inference_store.py index ffc9f3e11..901f77c67 100644 --- a/llama_stack/providers/utils/inference/inference_store.py +++ b/llama_stack/providers/utils/inference/inference_store.py @@ -22,7 +22,7 @@ from ..sqlstore.api import ColumnDefinition, ColumnType from ..sqlstore.authorized_sqlstore import AuthorizedSqlStore from ..sqlstore.sqlstore import SqlStoreConfig, SqlStoreType, sqlstore_impl -logger = get_logger(name=__name__, category="inference_store") +logger = get_logger(name=__name__, category="inference") class InferenceStore: diff --git a/llama_stack/providers/utils/responses/responses_store.py b/llama_stack/providers/utils/responses/responses_store.py 
index b9fceb1ab..cb665b88e 100644 --- a/llama_stack/providers/utils/responses/responses_store.py +++ b/llama_stack/providers/utils/responses/responses_store.py @@ -25,7 +25,7 @@ from ..sqlstore.api import ColumnDefinition, ColumnType from ..sqlstore.authorized_sqlstore import AuthorizedSqlStore from ..sqlstore.sqlstore import SqliteSqlStoreConfig, SqlStoreConfig, SqlStoreType, sqlstore_impl -logger = get_logger(name=__name__, category="responses_store") +logger = get_logger(name=__name__, category="openai_responses") class ResponsesStore: From d933d354e417632425fdeddb7f2316c540fc0ae8 Mon Sep 17 00:00:00 2001 From: ehhuang Date: Thu, 2 Oct 2025 14:51:41 -0700 Subject: [PATCH 10/13] chore: fix precommit (#3663) # What does this PR do? ## Test Plan --- docs/docs/providers/agents/index.mdx | 4 ++-- docs/docs/providers/batches/index.mdx | 24 ++++++++++++------------ docs/docs/providers/inference/index.mdx | 12 ++++++------ 3 files changed, 20 insertions(+), 20 deletions(-) diff --git a/docs/docs/providers/agents/index.mdx b/docs/docs/providers/agents/index.mdx index 52b92734e..06eb104af 100644 --- a/docs/docs/providers/agents/index.mdx +++ b/docs/docs/providers/agents/index.mdx @@ -1,7 +1,7 @@ --- description: "Agents -APIs for creating and interacting with agentic systems." + APIs for creating and interacting with agentic systems." sidebar_label: Agents title: Agents --- @@ -12,6 +12,6 @@ title: Agents Agents -APIs for creating and interacting with agentic systems. + APIs for creating and interacting with agentic systems. This section contains documentation for all available providers for the **agents** API. 
diff --git a/docs/docs/providers/batches/index.mdx b/docs/docs/providers/batches/index.mdx index 18e5e314d..2c64b277f 100644 --- a/docs/docs/providers/batches/index.mdx +++ b/docs/docs/providers/batches/index.mdx @@ -1,14 +1,14 @@ --- description: "The Batches API enables efficient processing of multiple requests in a single operation, -particularly useful for processing large datasets, batch evaluation workflows, and -cost-effective inference at scale. + particularly useful for processing large datasets, batch evaluation workflows, and + cost-effective inference at scale. -The API is designed to allow use of openai client libraries for seamless integration. + The API is designed to allow use of openai client libraries for seamless integration. -This API provides the following extensions: - - idempotent batch creation + This API provides the following extensions: + - idempotent batch creation -Note: This API is currently under active development and may undergo changes." + Note: This API is currently under active development and may undergo changes." sidebar_label: Batches title: Batches --- @@ -18,14 +18,14 @@ title: Batches ## Overview The Batches API enables efficient processing of multiple requests in a single operation, -particularly useful for processing large datasets, batch evaluation workflows, and -cost-effective inference at scale. + particularly useful for processing large datasets, batch evaluation workflows, and + cost-effective inference at scale. -The API is designed to allow use of openai client libraries for seamless integration. + The API is designed to allow use of openai client libraries for seamless integration. -This API provides the following extensions: - - idempotent batch creation + This API provides the following extensions: + - idempotent batch creation -Note: This API is currently under active development and may undergo changes. + Note: This API is currently under active development and may undergo changes. 
This section contains documentation for all available providers for the **batches** API. diff --git a/docs/docs/providers/inference/index.mdx b/docs/docs/providers/inference/index.mdx index 1dc479675..ebbaf1be1 100644 --- a/docs/docs/providers/inference/index.mdx +++ b/docs/docs/providers/inference/index.mdx @@ -1,9 +1,9 @@ --- description: "Llama Stack Inference API for generating completions, chat completions, and embeddings. -This API provides the raw interface to the underlying models. Two kinds of models are supported: -- LLM models: these models generate \"raw\" and \"chat\" (conversational) completions. -- Embedding models: these models generate embeddings to be used for semantic search." + This API provides the raw interface to the underlying models. Two kinds of models are supported: + - LLM models: these models generate \"raw\" and \"chat\" (conversational) completions. + - Embedding models: these models generate embeddings to be used for semantic search." sidebar_label: Inference title: Inference --- @@ -14,8 +14,8 @@ title: Inference Llama Stack Inference API for generating completions, chat completions, and embeddings. -This API provides the raw interface to the underlying models. Two kinds of models are supported: -- LLM models: these models generate "raw" and "chat" (conversational) completions. -- Embedding models: these models generate embeddings to be used for semantic search. + This API provides the raw interface to the underlying models. Two kinds of models are supported: + - LLM models: these models generate "raw" and "chat" (conversational) completions. + - Embedding models: these models generate embeddings to be used for semantic search. This section contains documentation for all available providers for the **inference** API. 
From 2e544ecd8a2837402b247d774fbcd9e561d3d1c7 Mon Sep 17 00:00:00 2001 From: Ashwin Bharambe Date: Thu, 2 Oct 2025 15:12:03 -0700 Subject: [PATCH 11/13] feat(tools)!: substantial clean up of "Tool" related datatypes (#3627) This is a sweeping change to clean up some gunk around our "Tool" definitions. First, we had two types `Tool` and `ToolDef`. The first of these was a "Resource" type for the registry but we had stopped registering tools inside the Registry long back (and only registered ToolGroups.) The latter was for specifying tools for the Agents API. This PR removes the former and adds an optional `toolgroup_id` field to the latter. Secondly, as pointed out by @bbrowning in https://github.com/llamastack/llama-stack/pull/3003#issuecomment-3245270132, we were doing a lossy conversion from a full JSON schema from the MCP tool specification into our ToolDefinition to send it to the model. There is no necessity to do this -- we ourselves aren't doing any execution at all but merely passing it to the chat completions API which supports this. By doing this (and by doing it poorly), we encountered limitations like not supporting array items, or not resolving $refs, etc. To fix this, we replaced the `parameters` field by `{ input_schema, output_schema }` which can be full blown JSON schemas. Finally, there were some types in our llama-related chat format conversion which needed some cleanup. We are taking this opportunity to clean those up. This PR is a substantial breaking change to the API. However, given our window for introducing breaking changes, this suits us just fine. I will be landing a concurrent `llama-stack-client` change as well since API shapes are changing. 
--- docs/static/deprecated-llama-stack-spec.html | 194 +- docs/static/deprecated-llama-stack-spec.yaml | 103 +- .../static/experimental-llama-stack-spec.html | 194 +- .../static/experimental-llama-stack-spec.yaml | 103 +- docs/static/llama-stack-spec.html | 303 +-- docs/static/llama-stack-spec.yaml | 183 +- docs/static/stainless-llama-stack-spec.html | 303 +-- docs/static/stainless-llama-stack-spec.yaml | 183 +- llama_stack/apis/inference/inference.py | 2 - llama_stack/apis/tools/tools.py | 69 +- llama_stack/core/datatypes.py | 9 +- llama_stack/core/routers/tool_runtime.py | 4 +- llama_stack/core/routing_tables/toolgroups.py | 32 +- llama_stack/core/server/server.py | 2 +- llama_stack/core/store/registry.py | 2 +- llama_stack/core/ui/page/playground/tools.py | 2 +- llama_stack/models/llama/datatypes.py | 21 +- .../models/llama/llama3/chat_format.py | 3 +- .../llama3/prompt_templates/system_prompts.py | 112 +- llama_stack/models/llama/llama3/tool_utils.py | 16 +- llama_stack/models/llama/llama3_1/prompts.py | 3 +- llama_stack/models/llama/llama3_3/prompts.py | 3 +- .../models/llama/llama4/chat_format.py | 3 +- .../llama4/prompt_templates/system_prompts.py | 43 +- .../agents/meta_reference/agent_instance.py | 49 +- .../meta_reference/responses/streaming.py | 42 +- .../LocalInferenceImpl/SystemPrompts.swift | 4 +- .../inline/tool_runtime/rag/memory.py | 18 +- .../providers/remote/inference/vllm/vllm.py | 26 +- .../tool_runtime/bing_search/bing_search.py | 18 +- .../tool_runtime/brave_search/brave_search.py | 18 +- .../tavily_search/tavily_search.py | 18 +- .../wolfram_alpha/wolfram_alpha.py | 18 +- .../utils/inference/openai_compat.py | 93 +- .../providers/utils/inference/openai_mixin.py | 52 +- llama_stack/providers/utils/tools/mcp.py | 17 +- tests/common/mcp.py | 14 +- .../inference/test_tools_with_schemas.py | 369 +++ .../recordings/responses/00f70ca112de.json | 4 +- .../recordings/responses/0396786db779.json | 366 +++ .../recordings/responses/044dcd8fdeb1.json | 
84 +- .../recordings/responses/04cb9de29e06.json | 366 +++ .../recordings/responses/05e3ebc68306.json | 4 +- .../recordings/responses/08a21ab74e0a.json | 542 ++++ .../recordings/responses/0989d0d62a86.json | 138 + .../recordings/responses/0a29c4085705.json | 124 + .../recordings/responses/0e8f2b001dd9.json | 10 +- .../recordings/responses/0fad19b9d308.json | 93 + .../recordings/responses/178538be60e2.json | 4 +- .../recordings/responses/1a4da7c94fde.json | 4 +- .../recordings/responses/1acd433c05d4.json | 1787 +++++++++++++ .../recordings/responses/1b939935d483.json | 258 ++ .../recordings/responses/21cf30c6181e.json | 119 + .../recordings/responses/239f4768f5aa.json | 10 +- .../recordings/responses/23ad3b9e003e.json | 57 + .../recordings/responses/2717f0003e0a.json | 4 +- .../recordings/responses/278d5568fa92.json | 388 +++ .../recordings/responses/2d187a11704c.json | 208 +- .../recordings/responses/325a72db5755.json | 756 +----- .../recordings/responses/3387f56ccac9.json | 4 +- .../recordings/responses/35a5f1de4bd7.json | 809 ++++++ .../recordings/responses/36badd90238f.json | 366 +++ .../recordings/responses/37706c1729ba.json | 4 +- .../recordings/responses/378412143edb.json | 419 +++ .../recordings/responses/38ea441b5f83.json | 10 +- .../recordings/responses/3a4fb206e68a.json | 986 +++++++ .../recordings/responses/3a81146f2afa.json | 1372 +++++----- .../recordings/responses/3bd4bb58d78a.json | 119 + .../recordings/responses/3ca695048bee.json | 24 +- .../recordings/responses/3f5871e0805d.json | 85 + .../recordings/responses/3fc7de7e822b.json | 119 + .../recordings/responses/41ac2702de6c.json | 4 +- .../recordings/responses/4283d7199d9b.json | 366 +++ .../recordings/responses/4a32ce3da3ce.json | 414 +++ .../recordings/responses/4c651211b0e0.json | 4 +- .../recordings/responses/4ebcaf6c2aee.json | 400 ++- .../recordings/responses/4f00cf740aba.json | 768 +++--- .../recordings/responses/517505777888.json | 768 +++--- .../recordings/responses/559296e84820.json | 4 +- 
.../recordings/responses/55ae40168378.json | 366 +++ .../recordings/responses/590d43ed64b8.json | 768 +++--- .../recordings/responses/5e8bf88b3c20.json | 804 ++++++ .../recordings/responses/63aa4590a38a.json | 768 +++--- .../recordings/responses/6412295819a1.json | 10 +- .../recordings/responses/6540a315ea8e.json | 119 + .../recordings/responses/65c12de0a1db.json | 10 +- .../recordings/responses/67f94c4f8ba0.json | 228 +- .../recordings/responses/6b3e593ad9b8.json | 4 +- .../recordings/responses/6f90277933e2.json | 419 +++ .../recordings/responses/6f96090aa955.json | 484 +--- .../recordings/responses/71c9c6746a31.json | 809 ++++++ .../recordings/responses/771131fb4c46.json | 4 +- .../recordings/responses/7a047bcf8b19.json | 4 +- .../recordings/responses/7c57049fc13f.json | 57 + .../recordings/responses/7d089a973e08.json | 804 ++++++ .../recordings/responses/7e4bdf20925c.json | 124 + .../recordings/responses/7fc8b6ca483d.json | 57 + .../recordings/responses/80311f244b55.json | 2304 ++++++++--------- .../recordings/responses/80e4404d8987.json | 28 +- .../recordings/responses/84432044194a.json | 414 +++ .../recordings/responses/8486e5b1c6db.json | 276 ++ .../recordings/responses/84fc473e7b29.json | 4 +- .../recordings/responses/87577729d812.json | 4 +- .../recordings/responses/8965c0df9071.json | 119 + .../recordings/responses/8baad1435f9c.json | 4 +- .../recordings/responses/8ce928ad0b85.json | 768 +++--- .../recordings/responses/8d035e153b6f.json | 4 +- .../recordings/responses/8deded211f21.json | 743 ++++++ .../recordings/responses/8f000a878ccd.json | 4 +- .../recordings/responses/920c0495cde6.json | 4 +- .../recordings/responses/92a9a916ef02.json | 10 +- .../recordings/responses/930cf0cec376.json | 1584 ++++++++++++ .../recordings/responses/931ac7158789.json | 86 + .../recordings/responses/9db34836a1a7.json | 119 + .../recordings/responses/9e0b1ac678f6.json | 4 +- .../recordings/responses/9ffc75524647.json | 119 + .../recordings/responses/a0c4df33879f.json | 1636 
+----------- .../recordings/responses/a11b11923cc8.json | 119 + .../recordings/responses/a46b77ffd494.json | 4 +- .../recordings/responses/a4c8d19bb1eb.json | 4 +- .../recordings/responses/a689181d64d3.json | 86 + .../recordings/responses/a92b8fc775d5.json | 4 +- .../recordings/responses/adf150be9638.json | 419 +++ .../recordings/responses/b050e5a7e4a3.json | 4 +- .../recordings/responses/b178d000a14a.json | 57 + .../recordings/responses/b28f75bd87dc.json | 4 +- .../recordings/responses/b374fc18c641.json | 258 ++ .../recordings/responses/b57525af4982.json | 119 + .../recordings/responses/b58e35a624b0.json | 4 +- .../recordings/responses/c13d7510774c.json | 768 +++--- .../recordings/responses/c1f63bb6469c.json | 119 + .../recordings/responses/c2ac76cbf66d.json | 4 +- .../recordings/responses/c3dbccc5de74.json | 10 +- .../recordings/responses/c4991de37dfb.json | 78 + .../recordings/responses/c62eb5d7115e.json | 10 +- .../recordings/responses/c6fc83f0a1d5.json | 1922 ++++++++++++++ .../recordings/responses/c7fc52830c4c.json | 119 + .../recordings/responses/c8234a1171f3.json | 4 +- .../recordings/responses/c8e196049fe4.json | 4 +- .../recordings/responses/ca5e40a262f5.json | 4 +- .../recordings/responses/ca92e698d8cd.json | 119 + .../recordings/responses/cb0e0321c53c.json | 414 +++ .../recordings/responses/cca0267555a6.json | 97 + .../recordings/responses/cd0ece88d392.json | 258 ++ .../recordings/responses/cd294c2e0038.json | 4 +- .../recordings/responses/ce21235ebde2.json | 124 + .../recordings/responses/cf776b1aa432.json | 32 +- .../recordings/responses/d7caf68e394e.json | 4 +- .../recordings/responses/d9e8f66e1d85.json | 117 + .../recordings/responses/df20f4b62da7.json | 258 ++ .../recordings/responses/e0c71820f395.json | 122 + .../recordings/responses/e1ccaa261725.json | 414 +++ .../recordings/responses/e25ab43491af.json | 4 +- .../recordings/responses/e3b94833d349.json | 388 +++ .../recordings/responses/e59abd091d90.json | 804 ++++++ 
.../recordings/responses/e9c8a0e4f0e0.json | 10 +- .../recordings/responses/eeb26200786f.json | 1355 ++++++++++ .../recordings/responses/f22b7da7ad75.json | 1204 +++++++++ .../recordings/responses/f23defea82ec.json | 400 ++- .../recordings/responses/f28a44c97ea7.json | 10 +- .../recordings/responses/f340a394f6e0.json | 4 +- .../recordings/responses/f6a1cb47dfe8.json | 170 ++ .../recordings/responses/f70f30f54211.json | 24 +- .../recordings/responses/f8ba05a5ce61.json | 402 +++ .../recordings/responses/fced8b60ae5f.json | 986 +++++++ .../recordings/responses/feae037e2abd.json | 258 ++ .../models-bd032f995f2a-16718308.json | 843 ++++++ .../tool_runtime/test_builtin_tools.py | 4 +- tests/integration/tool_runtime/test_mcp.py | 4 +- .../tool_runtime/test_mcp_json_schema.py | 404 +++ .../routers/test_routing_tables.py | 7 +- tests/unit/models/test_prompt_adapter.py | 78 +- .../agent/test_meta_reference_agent.py | 83 +- .../meta_reference/test_openai_responses.py | 15 +- .../providers/inference/test_remote_vllm.py | 17 +- .../responses/test_streaming.py | 16 +- .../utils/inference/test_openai_compat.py | 10 +- .../utils/test_openai_compat_conversion.py | 381 +++ tests/unit/tools/test_tools_json_schema.py | 297 +++ 179 files changed, 34186 insertions(+), 9171 deletions(-) create mode 100644 tests/integration/inference/test_tools_with_schemas.py create mode 100644 tests/integration/recordings/responses/0396786db779.json create mode 100644 tests/integration/recordings/responses/04cb9de29e06.json create mode 100644 tests/integration/recordings/responses/08a21ab74e0a.json create mode 100644 tests/integration/recordings/responses/0989d0d62a86.json create mode 100644 tests/integration/recordings/responses/0a29c4085705.json create mode 100644 tests/integration/recordings/responses/0fad19b9d308.json create mode 100644 tests/integration/recordings/responses/1acd433c05d4.json create mode 100644 tests/integration/recordings/responses/1b939935d483.json create mode 100644 
tests/integration/recordings/responses/21cf30c6181e.json create mode 100644 tests/integration/recordings/responses/23ad3b9e003e.json create mode 100644 tests/integration/recordings/responses/278d5568fa92.json create mode 100644 tests/integration/recordings/responses/35a5f1de4bd7.json create mode 100644 tests/integration/recordings/responses/36badd90238f.json create mode 100644 tests/integration/recordings/responses/378412143edb.json create mode 100644 tests/integration/recordings/responses/3a4fb206e68a.json create mode 100644 tests/integration/recordings/responses/3bd4bb58d78a.json create mode 100644 tests/integration/recordings/responses/3f5871e0805d.json create mode 100644 tests/integration/recordings/responses/3fc7de7e822b.json create mode 100644 tests/integration/recordings/responses/4283d7199d9b.json create mode 100644 tests/integration/recordings/responses/4a32ce3da3ce.json create mode 100644 tests/integration/recordings/responses/55ae40168378.json create mode 100644 tests/integration/recordings/responses/5e8bf88b3c20.json create mode 100644 tests/integration/recordings/responses/6540a315ea8e.json create mode 100644 tests/integration/recordings/responses/6f90277933e2.json create mode 100644 tests/integration/recordings/responses/71c9c6746a31.json create mode 100644 tests/integration/recordings/responses/7c57049fc13f.json create mode 100644 tests/integration/recordings/responses/7d089a973e08.json create mode 100644 tests/integration/recordings/responses/7e4bdf20925c.json create mode 100644 tests/integration/recordings/responses/7fc8b6ca483d.json create mode 100644 tests/integration/recordings/responses/84432044194a.json create mode 100644 tests/integration/recordings/responses/8486e5b1c6db.json create mode 100644 tests/integration/recordings/responses/8965c0df9071.json create mode 100644 tests/integration/recordings/responses/8deded211f21.json create mode 100644 tests/integration/recordings/responses/930cf0cec376.json create mode 100644 
tests/integration/recordings/responses/931ac7158789.json create mode 100644 tests/integration/recordings/responses/9db34836a1a7.json create mode 100644 tests/integration/recordings/responses/9ffc75524647.json create mode 100644 tests/integration/recordings/responses/a11b11923cc8.json create mode 100644 tests/integration/recordings/responses/a689181d64d3.json create mode 100644 tests/integration/recordings/responses/adf150be9638.json create mode 100644 tests/integration/recordings/responses/b178d000a14a.json create mode 100644 tests/integration/recordings/responses/b374fc18c641.json create mode 100644 tests/integration/recordings/responses/b57525af4982.json create mode 100644 tests/integration/recordings/responses/c1f63bb6469c.json create mode 100644 tests/integration/recordings/responses/c4991de37dfb.json create mode 100644 tests/integration/recordings/responses/c6fc83f0a1d5.json create mode 100644 tests/integration/recordings/responses/c7fc52830c4c.json create mode 100644 tests/integration/recordings/responses/ca92e698d8cd.json create mode 100644 tests/integration/recordings/responses/cb0e0321c53c.json create mode 100644 tests/integration/recordings/responses/cca0267555a6.json create mode 100644 tests/integration/recordings/responses/cd0ece88d392.json create mode 100644 tests/integration/recordings/responses/ce21235ebde2.json create mode 100644 tests/integration/recordings/responses/d9e8f66e1d85.json create mode 100644 tests/integration/recordings/responses/df20f4b62da7.json create mode 100644 tests/integration/recordings/responses/e0c71820f395.json create mode 100644 tests/integration/recordings/responses/e1ccaa261725.json create mode 100644 tests/integration/recordings/responses/e3b94833d349.json create mode 100644 tests/integration/recordings/responses/e59abd091d90.json create mode 100644 tests/integration/recordings/responses/eeb26200786f.json create mode 100644 tests/integration/recordings/responses/f22b7da7ad75.json create mode 100644 
tests/integration/recordings/responses/f6a1cb47dfe8.json create mode 100644 tests/integration/recordings/responses/f8ba05a5ce61.json create mode 100644 tests/integration/recordings/responses/fced8b60ae5f.json create mode 100644 tests/integration/recordings/responses/feae037e2abd.json create mode 100644 tests/integration/recordings/responses/models-bd032f995f2a-16718308.json create mode 100644 tests/integration/tool_runtime/test_mcp_json_schema.py create mode 100644 tests/unit/providers/utils/test_openai_compat_conversion.py create mode 100644 tests/unit/tools/test_tools_json_schema.py diff --git a/docs/static/deprecated-llama-stack-spec.html b/docs/static/deprecated-llama-stack-spec.html index fe63f78bc..7edfe3f5d 100644 --- a/docs/static/deprecated-llama-stack-spec.html +++ b/docs/static/deprecated-llama-stack-spec.html @@ -4289,6 +4289,10 @@ "ToolDef": { "type": "object", "properties": { + "toolgroup_id": { + "type": "string", + "description": "(Optional) ID of the tool group this tool belongs to" + }, "name": { "type": "string", "description": "Name of the tool" @@ -4297,12 +4301,57 @@ "type": "string", "description": "(Optional) Human-readable description of what the tool does" }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ToolParameter" + "input_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] }, - "description": "(Optional) List of parameters this tool accepts" + "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)" + }, + "output_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) JSON Schema 
for tool outputs (MCP outputSchema)" }, "metadata": { "type": "object", @@ -4338,68 +4387,6 @@ "title": "ToolDef", "description": "Tool definition used in runtime contexts." }, - "ToolParameter": { - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "Name of the parameter" - }, - "parameter_type": { - "type": "string", - "description": "Type of the parameter (e.g., string, integer)" - }, - "description": { - "type": "string", - "description": "Human-readable description of what the parameter does" - }, - "required": { - "type": "boolean", - "default": true, - "description": "Whether this parameter is required for tool invocation" - }, - "items": { - "type": "object", - "description": "Type of the elements when parameter_type is array" - }, - "title": { - "type": "string", - "description": "(Optional) Title of the parameter" - }, - "default": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "boolean" - }, - { - "type": "number" - }, - { - "type": "string" - }, - { - "type": "array" - }, - { - "type": "object" - } - ], - "description": "(Optional) Default value for the parameter if not provided" - } - }, - "additionalProperties": false, - "required": [ - "name", - "parameter_type", - "description", - "required" - ], - "title": "ToolParameter", - "description": "Parameter definition for a tool." 
- }, "TopKSamplingStrategy": { "type": "object", "properties": { @@ -4915,79 +4902,6 @@ ] }, "arguments": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - }, - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - } - ] - } - } - ] - }, - "arguments_json": { "type": "string" } }, diff --git a/docs/static/deprecated-llama-stack-spec.yaml b/docs/static/deprecated-llama-stack-spec.yaml index 9b1d3eff6..ca832d46b 100644 --- a/docs/static/deprecated-llama-stack-spec.yaml +++ b/docs/static/deprecated-llama-stack-spec.yaml @@ -3143,6 +3143,10 @@ components: ToolDef: type: object properties: + toolgroup_id: + type: string + description: >- + (Optional) ID of the tool group this tool belongs to name: type: string description: Name of the tool @@ -3150,12 +3154,30 @@ components: type: string description: >- (Optional) Human-readable description of what the tool does - parameters: - type: array - items: - $ref: '#/components/schemas/ToolParameter' + input_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object description: >- - (Optional) List of parameters this tool accepts + (Optional) JSON Schema for tool inputs (MCP inputSchema) + output_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) JSON 
Schema for tool outputs (MCP outputSchema) metadata: type: object additionalProperties: @@ -3174,50 +3196,6 @@ components: title: ToolDef description: >- Tool definition used in runtime contexts. - ToolParameter: - type: object - properties: - name: - type: string - description: Name of the parameter - parameter_type: - type: string - description: >- - Type of the parameter (e.g., string, integer) - description: - type: string - description: >- - Human-readable description of what the parameter does - required: - type: boolean - default: true - description: >- - Whether this parameter is required for tool invocation - items: - type: object - description: >- - Type of the elements when parameter_type is array - title: - type: string - description: (Optional) Title of the parameter - default: - oneOf: - - type: 'null' - - type: boolean - - type: number - - type: string - - type: array - - type: object - description: >- - (Optional) Default value for the parameter if not provided - additionalProperties: false - required: - - name - - parameter_type - - description - - required - title: ToolParameter - description: Parameter definition for a tool. 
TopKSamplingStrategy: type: object properties: @@ -3630,33 +3608,6 @@ components: title: BuiltinTool - type: string arguments: - oneOf: - - type: string - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: array - items: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - arguments_json: type: string additionalProperties: false required: diff --git a/docs/static/experimental-llama-stack-spec.html b/docs/static/experimental-llama-stack-spec.html index fe57f9132..a84226c05 100644 --- a/docs/static/experimental-llama-stack-spec.html +++ b/docs/static/experimental-llama-stack-spec.html @@ -2784,6 +2784,10 @@ "ToolDef": { "type": "object", "properties": { + "toolgroup_id": { + "type": "string", + "description": "(Optional) ID of the tool group this tool belongs to" + }, "name": { "type": "string", "description": "Name of the tool" @@ -2792,12 +2796,57 @@ "type": "string", "description": "(Optional) Human-readable description of what the tool does" }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ToolParameter" + "input_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] }, - "description": "(Optional) List of parameters this tool accepts" + "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)" + }, + "output_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + 
"description": "(Optional) JSON Schema for tool outputs (MCP outputSchema)" }, "metadata": { "type": "object", @@ -2833,68 +2882,6 @@ "title": "ToolDef", "description": "Tool definition used in runtime contexts." }, - "ToolParameter": { - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "Name of the parameter" - }, - "parameter_type": { - "type": "string", - "description": "Type of the parameter (e.g., string, integer)" - }, - "description": { - "type": "string", - "description": "Human-readable description of what the parameter does" - }, - "required": { - "type": "boolean", - "default": true, - "description": "Whether this parameter is required for tool invocation" - }, - "items": { - "type": "object", - "description": "Type of the elements when parameter_type is array" - }, - "title": { - "type": "string", - "description": "(Optional) Title of the parameter" - }, - "default": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "boolean" - }, - { - "type": "number" - }, - { - "type": "string" - }, - { - "type": "array" - }, - { - "type": "object" - } - ], - "description": "(Optional) Default value for the parameter if not provided" - } - }, - "additionalProperties": false, - "required": [ - "name", - "parameter_type", - "description", - "required" - ], - "title": "ToolParameter", - "description": "Parameter definition for a tool." 
- }, "TopKSamplingStrategy": { "type": "object", "properties": { @@ -3410,79 +3397,6 @@ ] }, "arguments": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - }, - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - } - ] - } - } - ] - }, - "arguments_json": { "type": "string" } }, diff --git a/docs/static/experimental-llama-stack-spec.yaml b/docs/static/experimental-llama-stack-spec.yaml index 85129336f..a08c0cc87 100644 --- a/docs/static/experimental-llama-stack-spec.yaml +++ b/docs/static/experimental-llama-stack-spec.yaml @@ -2002,6 +2002,10 @@ components: ToolDef: type: object properties: + toolgroup_id: + type: string + description: >- + (Optional) ID of the tool group this tool belongs to name: type: string description: Name of the tool @@ -2009,12 +2013,30 @@ components: type: string description: >- (Optional) Human-readable description of what the tool does - parameters: - type: array - items: - $ref: '#/components/schemas/ToolParameter' + input_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object description: >- - (Optional) List of parameters this tool accepts + (Optional) JSON Schema for tool inputs (MCP inputSchema) + output_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + 
(Optional) JSON Schema for tool outputs (MCP outputSchema) metadata: type: object additionalProperties: @@ -2033,50 +2055,6 @@ components: title: ToolDef description: >- Tool definition used in runtime contexts. - ToolParameter: - type: object - properties: - name: - type: string - description: Name of the parameter - parameter_type: - type: string - description: >- - Type of the parameter (e.g., string, integer) - description: - type: string - description: >- - Human-readable description of what the parameter does - required: - type: boolean - default: true - description: >- - Whether this parameter is required for tool invocation - items: - type: object - description: >- - Type of the elements when parameter_type is array - title: - type: string - description: (Optional) Title of the parameter - default: - oneOf: - - type: 'null' - - type: boolean - - type: number - - type: string - - type: array - - type: object - description: >- - (Optional) Default value for the parameter if not provided - additionalProperties: false - required: - - name - - parameter_type - - description - - required - title: ToolParameter - description: Parameter definition for a tool. 
TopKSamplingStrategy: type: object properties: @@ -2489,33 +2467,6 @@ components: title: BuiltinTool - type: string arguments: - oneOf: - - type: string - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: array - items: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - arguments_json: type: string additionalProperties: false required: diff --git a/docs/static/llama-stack-spec.html b/docs/static/llama-stack-spec.html index fa16e62ee..4693d39e0 100644 --- a/docs/static/llama-stack-spec.html +++ b/docs/static/llama-stack-spec.html @@ -2404,11 +2404,11 @@ "get": { "responses": { "200": { - "description": "A ListToolsResponse.", + "description": "A ListToolDefsResponse.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListToolsResponse" + "$ref": "#/components/schemas/ListToolDefsResponse" } } } @@ -2449,11 +2449,11 @@ "get": { "responses": { "200": { - "description": "A Tool.", + "description": "A ToolDef.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Tool" + "$ref": "#/components/schemas/ToolDef" } } } @@ -8490,79 +8490,6 @@ ] }, "arguments": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - }, - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - 
}, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - } - ] - } - } - ] - }, - "arguments_json": { "type": "string" } }, @@ -10156,6 +10083,10 @@ "ToolDef": { "type": "object", "properties": { + "toolgroup_id": { + "type": "string", + "description": "(Optional) ID of the tool group this tool belongs to" + }, "name": { "type": "string", "description": "Name of the tool" @@ -10164,12 +10095,57 @@ "type": "string", "description": "(Optional) Human-readable description of what the tool does" }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ToolParameter" + "input_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] }, - "description": "(Optional) List of parameters this tool accepts" + "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)" + }, + "output_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) JSON Schema for tool outputs (MCP outputSchema)" }, "metadata": { "type": "object", @@ -10205,68 +10181,6 @@ "title": "ToolDef", "description": "Tool definition used in runtime contexts." 
}, - "ToolParameter": { - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "Name of the parameter" - }, - "parameter_type": { - "type": "string", - "description": "Type of the parameter (e.g., string, integer)" - }, - "description": { - "type": "string", - "description": "Human-readable description of what the parameter does" - }, - "required": { - "type": "boolean", - "default": true, - "description": "Whether this parameter is required for tool invocation" - }, - "items": { - "type": "object", - "description": "Type of the elements when parameter_type is array" - }, - "title": { - "type": "string", - "description": "(Optional) Title of the parameter" - }, - "default": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "boolean" - }, - { - "type": "number" - }, - { - "type": "string" - }, - { - "type": "array" - }, - { - "type": "object" - } - ], - "description": "(Optional) Default value for the parameter if not provided" - } - }, - "additionalProperties": false, - "required": [ - "name", - "parameter_type", - "description", - "required" - ], - "title": "ToolParameter", - "description": "Parameter definition for a tool." 
- }, "ListToolDefsResponse": { "type": "object", "properties": { @@ -10761,107 +10675,6 @@ ], "title": "RegisterToolGroupRequest" }, - "Tool": { - "type": "object", - "properties": { - "identifier": { - "type": "string" - }, - "provider_resource_id": { - "type": "string" - }, - "provider_id": { - "type": "string" - }, - "type": { - "type": "string", - "enum": [ - "model", - "shield", - "vector_db", - "dataset", - "scoring_function", - "benchmark", - "tool", - "tool_group", - "prompt" - ], - "const": "tool", - "default": "tool", - "description": "Type of resource, always 'tool'" - }, - "toolgroup_id": { - "type": "string", - "description": "ID of the tool group this tool belongs to" - }, - "description": { - "type": "string", - "description": "Human-readable description of what the tool does" - }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ToolParameter" - }, - "description": "List of parameters this tool accepts" - }, - "metadata": { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "boolean" - }, - { - "type": "number" - }, - { - "type": "string" - }, - { - "type": "array" - }, - { - "type": "object" - } - ] - }, - "description": "(Optional) Additional metadata about the tool" - } - }, - "additionalProperties": false, - "required": [ - "identifier", - "provider_id", - "type", - "toolgroup_id", - "description", - "parameters" - ], - "title": "Tool", - "description": "A tool that can be invoked by agents." - }, - "ListToolsResponse": { - "type": "object", - "properties": { - "data": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Tool" - }, - "description": "List of tools" - } - }, - "additionalProperties": false, - "required": [ - "data" - ], - "title": "ListToolsResponse", - "description": "Response containing a list of tools." 
- }, "VectorDB": { "type": "object", "properties": { diff --git a/docs/static/llama-stack-spec.yaml b/docs/static/llama-stack-spec.yaml index 733e2cd21..7d275a221 100644 --- a/docs/static/llama-stack-spec.yaml +++ b/docs/static/llama-stack-spec.yaml @@ -1753,11 +1753,11 @@ paths: get: responses: '200': - description: A ListToolsResponse. + description: A ListToolDefsResponse. content: application/json: schema: - $ref: '#/components/schemas/ListToolsResponse' + $ref: '#/components/schemas/ListToolDefsResponse' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -1785,11 +1785,11 @@ paths: get: responses: '200': - description: A Tool. + description: A ToolDef. content: application/json: schema: - $ref: '#/components/schemas/Tool' + $ref: '#/components/schemas/ToolDef' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -6398,33 +6398,6 @@ components: title: BuiltinTool - type: string arguments: - oneOf: - - type: string - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: array - items: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - arguments_json: type: string additionalProperties: false required: @@ -7552,6 +7525,10 @@ components: ToolDef: type: object properties: + toolgroup_id: + type: string + description: >- + (Optional) ID of the tool group this tool belongs to name: type: string description: Name of the tool @@ -7559,12 +7536,30 @@ components: type: string description: >- (Optional) Human-readable description of what the tool does - parameters: - type: array - items: - $ref: '#/components/schemas/ToolParameter' + input_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - 
type: object description: >- - (Optional) List of parameters this tool accepts + (Optional) JSON Schema for tool inputs (MCP inputSchema) + output_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) JSON Schema for tool outputs (MCP outputSchema) metadata: type: object additionalProperties: @@ -7583,50 +7578,6 @@ components: title: ToolDef description: >- Tool definition used in runtime contexts. - ToolParameter: - type: object - properties: - name: - type: string - description: Name of the parameter - parameter_type: - type: string - description: >- - Type of the parameter (e.g., string, integer) - description: - type: string - description: >- - Human-readable description of what the parameter does - required: - type: boolean - default: true - description: >- - Whether this parameter is required for tool invocation - items: - type: object - description: >- - Type of the elements when parameter_type is array - title: - type: string - description: (Optional) Title of the parameter - default: - oneOf: - - type: 'null' - - type: boolean - - type: number - - type: string - - type: array - - type: object - description: >- - (Optional) Default value for the parameter if not provided - additionalProperties: false - required: - - name - - parameter_type - - description - - required - title: ToolParameter - description: Parameter definition for a tool. 
ListToolDefsResponse: type: object properties: @@ -8002,78 +7953,6 @@ components: - toolgroup_id - provider_id title: RegisterToolGroupRequest - Tool: - type: object - properties: - identifier: - type: string - provider_resource_id: - type: string - provider_id: - type: string - type: - type: string - enum: - - model - - shield - - vector_db - - dataset - - scoring_function - - benchmark - - tool - - tool_group - - prompt - const: tool - default: tool - description: Type of resource, always 'tool' - toolgroup_id: - type: string - description: >- - ID of the tool group this tool belongs to - description: - type: string - description: >- - Human-readable description of what the tool does - parameters: - type: array - items: - $ref: '#/components/schemas/ToolParameter' - description: List of parameters this tool accepts - metadata: - type: object - additionalProperties: - oneOf: - - type: 'null' - - type: boolean - - type: number - - type: string - - type: array - - type: object - description: >- - (Optional) Additional metadata about the tool - additionalProperties: false - required: - - identifier - - provider_id - - type - - toolgroup_id - - description - - parameters - title: Tool - description: A tool that can be invoked by agents. - ListToolsResponse: - type: object - properties: - data: - type: array - items: - $ref: '#/components/schemas/Tool' - description: List of tools - additionalProperties: false - required: - - data - title: ListToolsResponse - description: Response containing a list of tools. 
VectorDB: type: object properties: diff --git a/docs/static/stainless-llama-stack-spec.html b/docs/static/stainless-llama-stack-spec.html index 72ecb5bb5..1ae477e7e 100644 --- a/docs/static/stainless-llama-stack-spec.html +++ b/docs/static/stainless-llama-stack-spec.html @@ -2404,11 +2404,11 @@ "get": { "responses": { "200": { - "description": "A ListToolsResponse.", + "description": "A ListToolDefsResponse.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ListToolsResponse" + "$ref": "#/components/schemas/ListToolDefsResponse" } } } @@ -2449,11 +2449,11 @@ "get": { "responses": { "200": { - "description": "A Tool.", + "description": "A ToolDef.", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/Tool" + "$ref": "#/components/schemas/ToolDef" } } } @@ -10499,79 +10499,6 @@ ] }, "arguments": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - }, - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - }, - { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "number" - }, - { - "type": "boolean" - }, - { - "type": "null" - } - ] - } - } - ] - } - } - ] - }, - "arguments_json": { "type": "string" } }, @@ -12165,6 +12092,10 @@ "ToolDef": { "type": "object", "properties": { + "toolgroup_id": { + "type": "string", + "description": "(Optional) ID of the tool group this tool belongs to" + }, "name": { "type": "string", "description": "Name of the tool" @@ -12173,12 +12104,57 @@ "type": "string", "description": "(Optional) Human-readable description of what the tool does" }, - "parameters": { - "type": 
"array", - "items": { - "$ref": "#/components/schemas/ToolParameter" + "input_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] }, - "description": "(Optional) List of parameters this tool accepts" + "description": "(Optional) JSON Schema for tool inputs (MCP inputSchema)" + }, + "output_schema": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "type": "number" + }, + { + "type": "string" + }, + { + "type": "array" + }, + { + "type": "object" + } + ] + }, + "description": "(Optional) JSON Schema for tool outputs (MCP outputSchema)" }, "metadata": { "type": "object", @@ -12214,68 +12190,6 @@ "title": "ToolDef", "description": "Tool definition used in runtime contexts." }, - "ToolParameter": { - "type": "object", - "properties": { - "name": { - "type": "string", - "description": "Name of the parameter" - }, - "parameter_type": { - "type": "string", - "description": "Type of the parameter (e.g., string, integer)" - }, - "description": { - "type": "string", - "description": "Human-readable description of what the parameter does" - }, - "required": { - "type": "boolean", - "default": true, - "description": "Whether this parameter is required for tool invocation" - }, - "items": { - "type": "object", - "description": "Type of the elements when parameter_type is array" - }, - "title": { - "type": "string", - "description": "(Optional) Title of the parameter" - }, - "default": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "boolean" - }, - { - "type": "number" - }, - { - "type": "string" - }, - { - "type": "array" - }, - { - "type": "object" - } - ], - "description": "(Optional) Default value for the parameter if not provided" - } - }, - "additionalProperties": false, - "required": [ - "name", - 
"parameter_type", - "description", - "required" - ], - "title": "ToolParameter", - "description": "Parameter definition for a tool." - }, "ListToolDefsResponse": { "type": "object", "properties": { @@ -12770,107 +12684,6 @@ ], "title": "RegisterToolGroupRequest" }, - "Tool": { - "type": "object", - "properties": { - "identifier": { - "type": "string" - }, - "provider_resource_id": { - "type": "string" - }, - "provider_id": { - "type": "string" - }, - "type": { - "type": "string", - "enum": [ - "model", - "shield", - "vector_db", - "dataset", - "scoring_function", - "benchmark", - "tool", - "tool_group", - "prompt" - ], - "const": "tool", - "default": "tool", - "description": "Type of resource, always 'tool'" - }, - "toolgroup_id": { - "type": "string", - "description": "ID of the tool group this tool belongs to" - }, - "description": { - "type": "string", - "description": "Human-readable description of what the tool does" - }, - "parameters": { - "type": "array", - "items": { - "$ref": "#/components/schemas/ToolParameter" - }, - "description": "List of parameters this tool accepts" - }, - "metadata": { - "type": "object", - "additionalProperties": { - "oneOf": [ - { - "type": "null" - }, - { - "type": "boolean" - }, - { - "type": "number" - }, - { - "type": "string" - }, - { - "type": "array" - }, - { - "type": "object" - } - ] - }, - "description": "(Optional) Additional metadata about the tool" - } - }, - "additionalProperties": false, - "required": [ - "identifier", - "provider_id", - "type", - "toolgroup_id", - "description", - "parameters" - ], - "title": "Tool", - "description": "A tool that can be invoked by agents." - }, - "ListToolsResponse": { - "type": "object", - "properties": { - "data": { - "type": "array", - "items": { - "$ref": "#/components/schemas/Tool" - }, - "description": "List of tools" - } - }, - "additionalProperties": false, - "required": [ - "data" - ], - "title": "ListToolsResponse", - "description": "Response containing a list of tools." 
- }, "VectorDB": { "type": "object", "properties": { diff --git a/docs/static/stainless-llama-stack-spec.yaml b/docs/static/stainless-llama-stack-spec.yaml index 151ea1029..cb2584d8a 100644 --- a/docs/static/stainless-llama-stack-spec.yaml +++ b/docs/static/stainless-llama-stack-spec.yaml @@ -1756,11 +1756,11 @@ paths: get: responses: '200': - description: A ListToolsResponse. + description: A ListToolDefsResponse. content: application/json: schema: - $ref: '#/components/schemas/ListToolsResponse' + $ref: '#/components/schemas/ListToolDefsResponse' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -1788,11 +1788,11 @@ paths: get: responses: '200': - description: A Tool. + description: A ToolDef. content: application/json: schema: - $ref: '#/components/schemas/Tool' + $ref: '#/components/schemas/ToolDef' '400': $ref: '#/components/responses/BadRequest400' '429': @@ -7843,33 +7843,6 @@ components: title: BuiltinTool - type: string arguments: - oneOf: - - type: string - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: array - items: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - - type: object - additionalProperties: - oneOf: - - type: string - - type: integer - - type: number - - type: boolean - - type: 'null' - arguments_json: type: string additionalProperties: false required: @@ -8997,6 +8970,10 @@ components: ToolDef: type: object properties: + toolgroup_id: + type: string + description: >- + (Optional) ID of the tool group this tool belongs to name: type: string description: Name of the tool @@ -9004,12 +8981,30 @@ components: type: string description: >- (Optional) Human-readable description of what the tool does - parameters: - type: array - items: - $ref: '#/components/schemas/ToolParameter' + input_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: 
number + - type: string + - type: array + - type: object description: >- - (Optional) List of parameters this tool accepts + (Optional) JSON Schema for tool inputs (MCP inputSchema) + output_schema: + type: object + additionalProperties: + oneOf: + - type: 'null' + - type: boolean + - type: number + - type: string + - type: array + - type: object + description: >- + (Optional) JSON Schema for tool outputs (MCP outputSchema) metadata: type: object additionalProperties: @@ -9028,50 +9023,6 @@ components: title: ToolDef description: >- Tool definition used in runtime contexts. - ToolParameter: - type: object - properties: - name: - type: string - description: Name of the parameter - parameter_type: - type: string - description: >- - Type of the parameter (e.g., string, integer) - description: - type: string - description: >- - Human-readable description of what the parameter does - required: - type: boolean - default: true - description: >- - Whether this parameter is required for tool invocation - items: - type: object - description: >- - Type of the elements when parameter_type is array - title: - type: string - description: (Optional) Title of the parameter - default: - oneOf: - - type: 'null' - - type: boolean - - type: number - - type: string - - type: array - - type: object - description: >- - (Optional) Default value for the parameter if not provided - additionalProperties: false - required: - - name - - parameter_type - - description - - required - title: ToolParameter - description: Parameter definition for a tool. 
ListToolDefsResponse: type: object properties: @@ -9447,78 +9398,6 @@ components: - toolgroup_id - provider_id title: RegisterToolGroupRequest - Tool: - type: object - properties: - identifier: - type: string - provider_resource_id: - type: string - provider_id: - type: string - type: - type: string - enum: - - model - - shield - - vector_db - - dataset - - scoring_function - - benchmark - - tool - - tool_group - - prompt - const: tool - default: tool - description: Type of resource, always 'tool' - toolgroup_id: - type: string - description: >- - ID of the tool group this tool belongs to - description: - type: string - description: >- - Human-readable description of what the tool does - parameters: - type: array - items: - $ref: '#/components/schemas/ToolParameter' - description: List of parameters this tool accepts - metadata: - type: object - additionalProperties: - oneOf: - - type: 'null' - - type: boolean - - type: number - - type: string - - type: array - - type: object - description: >- - (Optional) Additional metadata about the tool - additionalProperties: false - required: - - identifier - - provider_id - - type - - toolgroup_id - - description - - parameters - title: Tool - description: A tool that can be invoked by agents. - ListToolsResponse: - type: object - properties: - data: - type: array - items: - $ref: '#/components/schemas/Tool' - description: List of tools - additionalProperties: false - required: - - data - title: ListToolsResponse - description: Response containing a list of tools. 
VectorDB: type: object properties: diff --git a/llama_stack/apis/inference/inference.py b/llama_stack/apis/inference/inference.py index d71aea38e..829a94a6a 100644 --- a/llama_stack/apis/inference/inference.py +++ b/llama_stack/apis/inference/inference.py @@ -27,14 +27,12 @@ from llama_stack.models.llama.datatypes import ( StopReason, ToolCall, ToolDefinition, - ToolParamDefinition, ToolPromptFormat, ) from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol from llama_stack.schema_utils import json_schema_type, register_schema, webmethod register_schema(ToolCall) -register_schema(ToolParamDefinition) register_schema(ToolDefinition) from enum import StrEnum diff --git a/llama_stack/apis/tools/tools.py b/llama_stack/apis/tools/tools.py index 0ebbe8c50..b6a1a2543 100644 --- a/llama_stack/apis/tools/tools.py +++ b/llama_stack/apis/tools/tools.py @@ -7,7 +7,7 @@ from enum import Enum from typing import Any, Literal, Protocol -from pydantic import BaseModel, Field +from pydantic import BaseModel from typing_extensions import runtime_checkable from llama_stack.apis.common.content_types import URL, InterleavedContent @@ -19,59 +19,23 @@ from llama_stack.schema_utils import json_schema_type, webmethod from .rag_tool import RAGToolRuntime -@json_schema_type -class ToolParameter(BaseModel): - """Parameter definition for a tool. 
- - :param name: Name of the parameter - :param parameter_type: Type of the parameter (e.g., string, integer) - :param description: Human-readable description of what the parameter does - :param required: Whether this parameter is required for tool invocation - :param items: Type of the elements when parameter_type is array - :param title: (Optional) Title of the parameter - :param default: (Optional) Default value for the parameter if not provided - """ - - name: str - parameter_type: str - description: str - required: bool = Field(default=True) - items: dict | None = None - title: str | None = None - default: Any | None = None - - -@json_schema_type -class Tool(Resource): - """A tool that can be invoked by agents. - - :param type: Type of resource, always 'tool' - :param toolgroup_id: ID of the tool group this tool belongs to - :param description: Human-readable description of what the tool does - :param parameters: List of parameters this tool accepts - :param metadata: (Optional) Additional metadata about the tool - """ - - type: Literal[ResourceType.tool] = ResourceType.tool - toolgroup_id: str - description: str - parameters: list[ToolParameter] - metadata: dict[str, Any] | None = None - - @json_schema_type class ToolDef(BaseModel): """Tool definition used in runtime contexts. 
:param name: Name of the tool :param description: (Optional) Human-readable description of what the tool does - :param parameters: (Optional) List of parameters this tool accepts + :param input_schema: (Optional) JSON Schema for tool inputs (MCP inputSchema) + :param output_schema: (Optional) JSON Schema for tool outputs (MCP outputSchema) :param metadata: (Optional) Additional metadata about the tool + :param toolgroup_id: (Optional) ID of the tool group this tool belongs to """ + toolgroup_id: str | None = None name: str description: str | None = None - parameters: list[ToolParameter] | None = None + input_schema: dict[str, Any] | None = None + output_schema: dict[str, Any] | None = None metadata: dict[str, Any] | None = None @@ -122,7 +86,7 @@ class ToolInvocationResult(BaseModel): class ToolStore(Protocol): - async def get_tool(self, tool_name: str) -> Tool: ... + async def get_tool(self, tool_name: str) -> ToolDef: ... async def get_tool_group(self, toolgroup_id: str) -> ToolGroup: ... @@ -135,15 +99,6 @@ class ListToolGroupsResponse(BaseModel): data: list[ToolGroup] -class ListToolsResponse(BaseModel): - """Response containing a list of tools. - - :param data: List of tools - """ - - data: list[Tool] - - class ListToolDefsResponse(BaseModel): """Response containing a list of tool definitions. @@ -194,11 +149,11 @@ class ToolGroups(Protocol): ... @webmethod(route="/tools", method="GET", level=LLAMA_STACK_API_V1) - async def list_tools(self, toolgroup_id: str | None = None) -> ListToolsResponse: + async def list_tools(self, toolgroup_id: str | None = None) -> ListToolDefsResponse: """List tools with optional tool group. :param toolgroup_id: The ID of the tool group to list tools for. - :returns: A ListToolsResponse. + :returns: A ListToolDefsResponse. """ ... @@ -206,11 +161,11 @@ class ToolGroups(Protocol): async def get_tool( self, tool_name: str, - ) -> Tool: + ) -> ToolDef: """Get a tool by its name. :param tool_name: The name of the tool to get. 
- :returns: A Tool. + :returns: A ToolDef. """ ... diff --git a/llama_stack/core/datatypes.py b/llama_stack/core/datatypes.py index 6a297f012..930cf2646 100644 --- a/llama_stack/core/datatypes.py +++ b/llama_stack/core/datatypes.py @@ -22,7 +22,7 @@ from llama_stack.apis.safety import Safety from llama_stack.apis.scoring import Scoring from llama_stack.apis.scoring_functions import ScoringFn, ScoringFnInput from llama_stack.apis.shields import Shield, ShieldInput -from llama_stack.apis.tools import Tool, ToolGroup, ToolGroupInput, ToolRuntime +from llama_stack.apis.tools import ToolGroup, ToolGroupInput, ToolRuntime from llama_stack.apis.vector_dbs import VectorDB, VectorDBInput from llama_stack.apis.vector_io import VectorIO from llama_stack.core.access_control.datatypes import AccessRule @@ -84,15 +84,11 @@ class BenchmarkWithOwner(Benchmark, ResourceWithOwner): pass -class ToolWithOwner(Tool, ResourceWithOwner): - pass - - class ToolGroupWithOwner(ToolGroup, ResourceWithOwner): pass -RoutableObject = Model | Shield | VectorDB | Dataset | ScoringFn | Benchmark | Tool | ToolGroup +RoutableObject = Model | Shield | VectorDB | Dataset | ScoringFn | Benchmark | ToolGroup RoutableObjectWithProvider = Annotated[ ModelWithOwner @@ -101,7 +97,6 @@ RoutableObjectWithProvider = Annotated[ | DatasetWithOwner | ScoringFnWithOwner | BenchmarkWithOwner - | ToolWithOwner | ToolGroupWithOwner, Field(discriminator="type"), ] diff --git a/llama_stack/core/routers/tool_runtime.py b/llama_stack/core/routers/tool_runtime.py index fd606f33b..ad82293e5 100644 --- a/llama_stack/core/routers/tool_runtime.py +++ b/llama_stack/core/routers/tool_runtime.py @@ -11,7 +11,7 @@ from llama_stack.apis.common.content_types import ( InterleavedContent, ) from llama_stack.apis.tools import ( - ListToolsResponse, + ListToolDefsResponse, RAGDocument, RAGQueryConfig, RAGQueryResult, @@ -86,6 +86,6 @@ class ToolRuntimeRouter(ToolRuntime): async def list_runtime_tools( self, tool_group_id: str | None = 
None, mcp_endpoint: URL | None = None - ) -> ListToolsResponse: + ) -> ListToolDefsResponse: logger.debug(f"ToolRuntimeRouter.list_runtime_tools: {tool_group_id}") return await self.routing_table.list_tools(tool_group_id) diff --git a/llama_stack/core/routing_tables/toolgroups.py b/llama_stack/core/routing_tables/toolgroups.py index 8172b9b5f..2d47bbb17 100644 --- a/llama_stack/core/routing_tables/toolgroups.py +++ b/llama_stack/core/routing_tables/toolgroups.py @@ -8,7 +8,7 @@ from typing import Any from llama_stack.apis.common.content_types import URL from llama_stack.apis.common.errors import ToolGroupNotFoundError -from llama_stack.apis.tools import ListToolGroupsResponse, ListToolsResponse, Tool, ToolGroup, ToolGroups +from llama_stack.apis.tools import ListToolDefsResponse, ListToolGroupsResponse, ToolDef, ToolGroup, ToolGroups from llama_stack.core.datatypes import AuthenticationRequiredError, ToolGroupWithOwner from llama_stack.log import get_logger @@ -27,7 +27,7 @@ def parse_toolgroup_from_toolgroup_name_pair(toolgroup_name_with_maybe_tool_name class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups): - toolgroups_to_tools: dict[str, list[Tool]] = {} + toolgroups_to_tools: dict[str, list[ToolDef]] = {} tool_to_toolgroup: dict[str, str] = {} # overridden @@ -43,7 +43,7 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups): routing_key = self.tool_to_toolgroup[routing_key] return await super().get_provider_impl(routing_key, provider_id) - async def list_tools(self, toolgroup_id: str | None = None) -> ListToolsResponse: + async def list_tools(self, toolgroup_id: str | None = None) -> ListToolDefsResponse: if toolgroup_id: if group_id := parse_toolgroup_from_toolgroup_name_pair(toolgroup_id): toolgroup_id = group_id @@ -68,30 +68,19 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups): continue all_tools.extend(self.toolgroups_to_tools[toolgroup.identifier]) - return ListToolsResponse(data=all_tools) + return 
ListToolDefsResponse(data=all_tools) async def _index_tools(self, toolgroup: ToolGroup): provider_impl = await super().get_provider_impl(toolgroup.identifier, toolgroup.provider_id) tooldefs_response = await provider_impl.list_runtime_tools(toolgroup.identifier, toolgroup.mcp_endpoint) - # TODO: kill this Tool vs ToolDef distinction tooldefs = tooldefs_response.data - tools = [] for t in tooldefs: - tools.append( - Tool( - identifier=t.name, - toolgroup_id=toolgroup.identifier, - description=t.description or "", - parameters=t.parameters or [], - metadata=t.metadata, - provider_id=toolgroup.provider_id, - ) - ) + t.toolgroup_id = toolgroup.identifier - self.toolgroups_to_tools[toolgroup.identifier] = tools - for tool in tools: - self.tool_to_toolgroup[tool.identifier] = toolgroup.identifier + self.toolgroups_to_tools[toolgroup.identifier] = tooldefs + for tool in tooldefs: + self.tool_to_toolgroup[tool.name] = toolgroup.identifier async def list_tool_groups(self) -> ListToolGroupsResponse: return ListToolGroupsResponse(data=await self.get_all_with_type("tool_group")) @@ -102,12 +91,12 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups): raise ToolGroupNotFoundError(toolgroup_id) return tool_group - async def get_tool(self, tool_name: str) -> Tool: + async def get_tool(self, tool_name: str) -> ToolDef: if tool_name in self.tool_to_toolgroup: toolgroup_id = self.tool_to_toolgroup[tool_name] tools = self.toolgroups_to_tools[toolgroup_id] for tool in tools: - if tool.identifier == tool_name: + if tool.name == tool_name: return tool raise ValueError(f"Tool '{tool_name}' not found") @@ -132,7 +121,6 @@ class ToolGroupsRoutingTable(CommonRoutingTableImpl, ToolGroups): # baked in some of the code and tests right now. 
if not toolgroup.mcp_endpoint: await self._index_tools(toolgroup) - return toolgroup async def unregister_toolgroup(self, toolgroup_id: str) -> None: await self.unregister_object(await self.get_tool_group(toolgroup_id)) diff --git a/llama_stack/core/server/server.py b/llama_stack/core/server/server.py index 7d119c139..873335775 100644 --- a/llama_stack/core/server/server.py +++ b/llama_stack/core/server/server.py @@ -257,7 +257,7 @@ def create_dynamic_typed_route(func: Any, method: str, route: str) -> Callable: return result except Exception as e: - if logger.isEnabledFor(logging.DEBUG): + if logger.isEnabledFor(logging.INFO): logger.exception(f"Error executing endpoint {route=} {method=}") else: logger.error(f"Error executing endpoint {route=} {method=}: {str(e)}") diff --git a/llama_stack/core/store/registry.py b/llama_stack/core/store/registry.py index 5f4abe9aa..624dbd176 100644 --- a/llama_stack/core/store/registry.py +++ b/llama_stack/core/store/registry.py @@ -36,7 +36,7 @@ class DistributionRegistry(Protocol): REGISTER_PREFIX = "distributions:registry" -KEY_VERSION = "v9" +KEY_VERSION = "v10" KEY_FORMAT = f"{REGISTER_PREFIX}:{KEY_VERSION}::" + "{type}:{identifier}" diff --git a/llama_stack/core/ui/page/playground/tools.py b/llama_stack/core/ui/page/playground/tools.py index 602c9eea1..4ee9d2204 100644 --- a/llama_stack/core/ui/page/playground/tools.py +++ b/llama_stack/core/ui/page/playground/tools.py @@ -81,7 +81,7 @@ def tool_chat_page(): for toolgroup_id in toolgroup_selection: tools = client.tools.list(toolgroup_id=toolgroup_id) - grouped_tools[toolgroup_id] = [tool.identifier for tool in tools] + grouped_tools[toolgroup_id] = [tool.name for tool in tools] total_tools += len(tools) st.markdown(f"Active Tools: 🛠 {total_tools}") diff --git a/llama_stack/models/llama/datatypes.py b/llama_stack/models/llama/datatypes.py index 0baa6e55b..7cb7aa7bd 100644 --- a/llama_stack/models/llama/datatypes.py +++ b/llama_stack/models/llama/datatypes.py @@ -37,14 +37,7 
@@ RecursiveType = Primitive | list[Primitive] | dict[str, Primitive] class ToolCall(BaseModel): call_id: str tool_name: BuiltinTool | str - # Plan is to deprecate the Dict in favor of a JSON string - # that is parsed on the client side instead of trying to manage - # the recursive type here. - # Making this a union so that client side can start prepping for this change. - # Eventually, we will remove both the Dict and arguments_json field, - # and arguments will just be a str - arguments: str | dict[str, RecursiveType] - arguments_json: str | None = None + arguments: str @field_validator("tool_name", mode="before") @classmethod @@ -88,19 +81,11 @@ class StopReason(Enum): out_of_tokens = "out_of_tokens" -class ToolParamDefinition(BaseModel): - param_type: str - description: str | None = None - required: bool | None = True - items: Any | None = None - title: str | None = None - default: Any | None = None - - class ToolDefinition(BaseModel): tool_name: BuiltinTool | str description: str | None = None - parameters: dict[str, ToolParamDefinition] | None = None + input_schema: dict[str, Any] | None = None + output_schema: dict[str, Any] | None = None @field_validator("tool_name", mode="before") @classmethod diff --git a/llama_stack/models/llama/llama3/chat_format.py b/llama_stack/models/llama/llama3/chat_format.py index 1f88a1699..d65865cb5 100644 --- a/llama_stack/models/llama/llama3/chat_format.py +++ b/llama_stack/models/llama/llama3/chat_format.py @@ -232,8 +232,7 @@ class ChatFormat: ToolCall( call_id=call_id, tool_name=tool_name, - arguments=tool_arguments, - arguments_json=json.dumps(tool_arguments), + arguments=json.dumps(tool_arguments), ) ) content = "" diff --git a/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py b/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py index ab626e5af..11a5993e9 100644 --- a/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py +++ 
b/llama_stack/models/llama/llama3/prompt_templates/system_prompts.py @@ -18,7 +18,6 @@ from typing import Any from llama_stack.apis.inference import ( BuiltinTool, ToolDefinition, - ToolParamDefinition, ) from .base import PromptTemplate, PromptTemplateGeneratorBase @@ -101,11 +100,8 @@ class JsonCustomToolGenerator(PromptTemplateGeneratorBase): {# manually setting up JSON because jinja sorts keys in unexpected ways -#} {%- set tname = t.tool_name -%} {%- set tdesc = t.description -%} - {%- set tparams = t.parameters -%} - {%- set required_params = [] -%} - {%- for name, param in tparams.items() if param.required == true -%} - {%- set _ = required_params.append(name) -%} - {%- endfor -%} + {%- set tprops = t.input_schema.get('properties', {}) -%} + {%- set required_params = t.input_schema.get('required', []) -%} { "type": "function", "function": { @@ -114,11 +110,11 @@ class JsonCustomToolGenerator(PromptTemplateGeneratorBase): "parameters": { "type": "object", "properties": [ - {%- for name, param in tparams.items() %} + {%- for name, param in tprops.items() %} { "{{name}}": { "type": "object", - "description": "{{param.description}}" + "description": "{{param.get('description', '')}}" } }{% if not loop.last %},{% endif %} {%- endfor %} @@ -143,17 +139,19 @@ class JsonCustomToolGenerator(PromptTemplateGeneratorBase): ToolDefinition( tool_name="trending_songs", description="Returns the trending songs on a Music site", - parameters={ - "n": ToolParamDefinition( - param_type="int", - description="The number of songs to return", - required=True, - ), - "genre": ToolParamDefinition( - param_type="str", - description="The genre of the songs to return", - required=False, - ), + input_schema={ + "type": "object", + "properties": { + "n": { + "type": "int", + "description": "The number of songs to return", + }, + "genre": { + "type": "str", + "description": "The genre of the songs to return", + }, + }, + "required": ["n"], }, ), ] @@ -170,11 +168,14 @@ class 
FunctionTagCustomToolGenerator(PromptTemplateGeneratorBase): {#- manually setting up JSON because jinja sorts keys in unexpected ways -#} {%- set tname = t.tool_name -%} {%- set tdesc = t.description -%} - {%- set modified_params = t.parameters.copy() -%} - {%- for key, value in modified_params.items() -%} - {%- if 'default' in value -%} - {%- set _ = value.pop('default', None) -%} + {%- set tprops = t.input_schema.get('properties', {}) -%} + {%- set modified_params = {} -%} + {%- for key, value in tprops.items() -%} + {%- set param_copy = value.copy() -%} + {%- if 'default' in param_copy -%} + {%- set _ = param_copy.pop('default', None) -%} {%- endif -%} + {%- set _ = modified_params.update({key: param_copy}) -%} {%- endfor -%} {%- set tparams = modified_params | tojson -%} Use the function '{{ tname }}' to '{{ tdesc }}': @@ -205,17 +206,19 @@ class FunctionTagCustomToolGenerator(PromptTemplateGeneratorBase): ToolDefinition( tool_name="trending_songs", description="Returns the trending songs on a Music site", - parameters={ - "n": ToolParamDefinition( - param_type="int", - description="The number of songs to return", - required=True, - ), - "genre": ToolParamDefinition( - param_type="str", - description="The genre of the songs to return", - required=False, - ), + input_schema={ + "type": "object", + "properties": { + "n": { + "type": "int", + "description": "The number of songs to return", + }, + "genre": { + "type": "str", + "description": "The genre of the songs to return", + }, + }, + "required": ["n"], }, ), ] @@ -255,11 +258,8 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801 {# manually setting up JSON because jinja sorts keys in unexpected ways -#} {%- set tname = t.tool_name -%} {%- set tdesc = t.description -%} - {%- set tparams = t.parameters -%} - {%- set required_params = [] -%} - {%- for name, param in tparams.items() if param.required == true -%} - {%- set _ = required_params.append(name) -%} - {%- endfor -%} + {%- set 
tprops = (t.input_schema or {}).get('properties', {}) -%} + {%- set required_params = (t.input_schema or {}).get('required', []) -%} { "name": "{{tname}}", "description": "{{tdesc}}", @@ -267,11 +267,11 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801 "type": "dict", "required": {{ required_params | tojson }}, "properties": { - {%- for name, param in tparams.items() %} + {%- for name, param in tprops.items() %} "{{name}}": { - "type": "{{param.param_type}}", - "description": "{{param.description}}"{% if param.default %}, - "default": "{{param.default}}"{% endif %} + "type": "{{param.get('type', 'string')}}", + "description": "{{param.get('description', '')}}"{% if param.get('default') %}, + "default": "{{param.get('default')}}"{% endif %} }{% if not loop.last %},{% endif %} {%- endfor %} } @@ -299,18 +299,20 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801 ToolDefinition( tool_name="get_weather", description="Get weather info for places", - parameters={ - "city": ToolParamDefinition( - param_type="string", - description="The name of the city to get the weather for", - required=True, - ), - "metric": ToolParamDefinition( - param_type="string", - description="The metric for weather. Options are: celsius, fahrenheit", - required=False, - default="celsius", - ), + input_schema={ + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "The name of the city to get the weather for", + }, + "metric": { + "type": "string", + "description": "The metric for weather. 
Options are: celsius, fahrenheit", + "default": "celsius", + }, + }, + "required": ["city"], }, ), ] diff --git a/llama_stack/models/llama/llama3/tool_utils.py b/llama_stack/models/llama/llama3/tool_utils.py index d0e3e7671..8c12fe680 100644 --- a/llama_stack/models/llama/llama3/tool_utils.py +++ b/llama_stack/models/llama/llama3/tool_utils.py @@ -220,17 +220,18 @@ class ToolUtils: @staticmethod def encode_tool_call(t: ToolCall, tool_prompt_format: ToolPromptFormat) -> str: + args = json.loads(t.arguments) if t.tool_name == BuiltinTool.brave_search: - q = t.arguments["query"] + q = args["query"] return f'brave_search.call(query="{q}")' elif t.tool_name == BuiltinTool.wolfram_alpha: - q = t.arguments["query"] + q = args["query"] return f'wolfram_alpha.call(query="{q}")' elif t.tool_name == BuiltinTool.photogen: - q = t.arguments["query"] + q = args["query"] return f'photogen.call(query="{q}")' elif t.tool_name == BuiltinTool.code_interpreter: - return t.arguments["code"] + return args["code"] else: fname = t.tool_name @@ -239,12 +240,11 @@ class ToolUtils: { "type": "function", "name": fname, - "parameters": t.arguments, + "parameters": args, } ) elif tool_prompt_format == ToolPromptFormat.function_tag: - args = json.dumps(t.arguments) - return f"{args}" + return f"{t.arguments}" elif tool_prompt_format == ToolPromptFormat.python_list: @@ -260,7 +260,7 @@ class ToolUtils: else: raise ValueError(f"Unsupported type: {type(value)}") - args_str = ", ".join(f"{k}={format_value(v)}" for k, v in t.arguments.items()) + args_str = ", ".join(f"{k}={format_value(v)}" for k, v in args.items()) return f"[{fname}({args_str})]" else: raise ValueError(f"Unsupported tool prompt format: {tool_prompt_format}") diff --git a/llama_stack/models/llama/llama3_1/prompts.py b/llama_stack/models/llama/llama3_1/prompts.py index 579a5ee02..433c62d86 100644 --- a/llama_stack/models/llama/llama3_1/prompts.py +++ b/llama_stack/models/llama/llama3_1/prompts.py @@ -11,6 +11,7 @@ # top-level folder 
for each specific model found within the models/ directory at # the top-level of this source tree. +import json import textwrap from llama_stack.models.llama.datatypes import ( @@ -184,7 +185,7 @@ def usecases() -> list[UseCase | str]: ToolCall( call_id="tool_call_id", tool_name=BuiltinTool.wolfram_alpha, - arguments={"query": "100th decimal of pi"}, + arguments=json.dumps({"query": "100th decimal of pi"}), ) ], ), diff --git a/llama_stack/models/llama/llama3_3/prompts.py b/llama_stack/models/llama/llama3_3/prompts.py index 85796608a..0470e3218 100644 --- a/llama_stack/models/llama/llama3_3/prompts.py +++ b/llama_stack/models/llama/llama3_3/prompts.py @@ -11,6 +11,7 @@ # top-level folder for each specific model found within the models/ directory at # the top-level of this source tree. +import json import textwrap from llama_stack.models.llama.datatypes import ( @@ -185,7 +186,7 @@ def usecases() -> list[UseCase | str]: ToolCall( call_id="tool_call_id", tool_name=BuiltinTool.wolfram_alpha, - arguments={"query": "100th decimal of pi"}, + arguments=json.dumps({"query": "100th decimal of pi"}), ) ], ), diff --git a/llama_stack/models/llama/llama4/chat_format.py b/llama_stack/models/llama/llama4/chat_format.py index 96ebd0881..3864f6438 100644 --- a/llama_stack/models/llama/llama4/chat_format.py +++ b/llama_stack/models/llama/llama4/chat_format.py @@ -298,8 +298,7 @@ class ChatFormat: ToolCall( call_id=call_id, tool_name=tool_name, - arguments=tool_arguments, - arguments_json=json.dumps(tool_arguments), + arguments=json.dumps(tool_arguments), ) ) content = "" diff --git a/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py b/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py index 9c19f89ae..1ee570933 100644 --- a/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py +++ b/llama_stack/models/llama/llama4/prompt_templates/system_prompts.py @@ -13,7 +13,7 @@ import textwrap -from llama_stack.apis.inference import ToolDefinition, 
ToolParamDefinition +from llama_stack.apis.inference import ToolDefinition from llama_stack.models.llama.llama3.prompt_templates.base import ( PromptTemplate, PromptTemplateGeneratorBase, @@ -81,11 +81,8 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801 {# manually setting up JSON because jinja sorts keys in unexpected ways -#} {%- set tname = t.tool_name -%} {%- set tdesc = t.description -%} - {%- set tparams = t.parameters -%} - {%- set required_params = [] -%} - {%- for name, param in tparams.items() if param.required == true -%} - {%- set _ = required_params.append(name) -%} - {%- endfor -%} + {%- set tprops = t.input_schema.get('properties', {}) -%} + {%- set required_params = t.input_schema.get('required', []) -%} { "name": "{{tname}}", "description": "{{tdesc}}", @@ -93,11 +90,11 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801 "type": "dict", "required": {{ required_params | tojson }}, "properties": { - {%- for name, param in tparams.items() %} + {%- for name, param in tprops.items() %} "{{name}}": { - "type": "{{param.param_type}}", - "description": "{{param.description}}"{% if param.default %}, - "default": "{{param.default}}"{% endif %} + "type": "{{param.get('type', 'string')}}", + "description": "{{param.get('description', '')}}"{% if param.get('default') %}, + "default": "{{param.get('default')}}"{% endif %} }{% if not loop.last %},{% endif %} {%- endfor %} } @@ -119,18 +116,20 @@ class PythonListCustomToolGenerator(PromptTemplateGeneratorBase): # noqa: N801 ToolDefinition( tool_name="get_weather", description="Get weather info for places", - parameters={ - "city": ToolParamDefinition( - param_type="string", - description="The name of the city to get the weather for", - required=True, - ), - "metric": ToolParamDefinition( - param_type="string", - description="The metric for weather. 
Options are: celsius, fahrenheit", - required=False, - default="celsius", - ), + input_schema={ + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "The name of the city to get the weather for", + }, + "metric": { + "type": "string", + "description": "The metric for weather. Options are: celsius, fahrenheit", + "default": "celsius", + }, + }, + "required": ["city"], }, ), ] diff --git a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py index 32c59ba2c..207f0daec 100644 --- a/llama_stack/providers/inline/agents/meta_reference/agent_instance.py +++ b/llama_stack/providers/inline/agents/meta_reference/agent_instance.py @@ -60,7 +60,6 @@ from llama_stack.apis.inference import ( StopReason, SystemMessage, ToolDefinition, - ToolParamDefinition, ToolResponse, ToolResponseMessage, UserMessage, @@ -866,20 +865,12 @@ class ChatAgent(ShieldRunnerMixin): for tool_def in self.agent_config.client_tools: if tool_name_to_def.get(tool_def.name, None): raise ValueError(f"Tool {tool_def.name} already exists") + + # Use input_schema from ToolDef directly tool_name_to_def[tool_def.name] = ToolDefinition( tool_name=tool_def.name, description=tool_def.description, - parameters={ - param.name: ToolParamDefinition( - param_type=param.parameter_type, - description=param.description, - required=param.required, - items=param.items, - title=param.title, - default=param.default, - ) - for param in tool_def.parameters - }, + input_schema=tool_def.input_schema, ) for toolgroup_name_with_maybe_tool_name in agent_config_toolgroups: toolgroup_name, input_tool_name = self._parse_toolgroup_name(toolgroup_name_with_maybe_tool_name) @@ -889,44 +880,34 @@ class ChatAgent(ShieldRunnerMixin): [t.identifier for t in (await self.tool_groups_api.list_tool_groups()).data] ) raise ValueError(f"Toolgroup {toolgroup_name} not found, available toolgroups: {available_tool_groups}") - if 
input_tool_name is not None and not any(tool.identifier == input_tool_name for tool in tools.data): + if input_tool_name is not None and not any(tool.name == input_tool_name for tool in tools.data): raise ValueError( - f"Tool {input_tool_name} not found in toolgroup {toolgroup_name}. Available tools: {', '.join([tool.identifier for tool in tools.data])}" + f"Tool {input_tool_name} not found in toolgroup {toolgroup_name}. Available tools: {', '.join([tool.name for tool in tools.data])}" ) for tool_def in tools.data: if toolgroup_name.startswith("builtin") and toolgroup_name != RAG_TOOL_GROUP: - identifier: str | BuiltinTool | None = tool_def.identifier + identifier: str | BuiltinTool | None = tool_def.name if identifier == "web_search": identifier = BuiltinTool.brave_search else: identifier = BuiltinTool(identifier) else: # add if tool_name is unspecified or the tool_def identifier is the same as the tool_name - if input_tool_name in (None, tool_def.identifier): - identifier = tool_def.identifier + if input_tool_name in (None, tool_def.name): + identifier = tool_def.name else: identifier = None if tool_name_to_def.get(identifier, None): raise ValueError(f"Tool {identifier} already exists") if identifier: - tool_name_to_def[tool_def.identifier] = ToolDefinition( + tool_name_to_def[identifier] = ToolDefinition( tool_name=identifier, description=tool_def.description, - parameters={ - param.name: ToolParamDefinition( - param_type=param.parameter_type, - description=param.description, - required=param.required, - items=param.items, - title=param.title, - default=param.default, - ) - for param in tool_def.parameters - }, + input_schema=tool_def.input_schema, ) - tool_name_to_args[tool_def.identifier] = toolgroup_to_args.get(toolgroup_name, {}) + tool_name_to_args[identifier] = toolgroup_to_args.get(toolgroup_name, {}) self.tool_defs, self.tool_name_to_args = ( list(tool_name_to_def.values()), @@ -970,12 +951,18 @@ class ChatAgent(ShieldRunnerMixin): tool_name_str = 
tool_name logger.info(f"executing tool call: {tool_name_str} with args: {tool_call.arguments}") + + try: + args = json.loads(tool_call.arguments) + except json.JSONDecodeError as e: + raise ValueError(f"Failed to parse arguments for tool call: {tool_call.arguments}") from e + result = await self.tool_runtime_api.invoke_tool( tool_name=tool_name_str, kwargs={ "session_id": session_id, # get the arguments generated by the model and augment with toolgroup arg overrides for the agent - **tool_call.arguments, + **args, **self.tool_name_to_args.get(tool_name_str, {}), }, ) diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py index 7eaf08e13..732ad708e 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py @@ -62,22 +62,13 @@ def convert_tooldef_to_chat_tool(tool_def): ChatCompletionToolParam suitable for OpenAI chat completion """ - from llama_stack.models.llama.datatypes import ToolDefinition, ToolParamDefinition + from llama_stack.models.llama.datatypes import ToolDefinition from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool internal_tool_def = ToolDefinition( tool_name=tool_def.name, description=tool_def.description, - parameters={ - param.name: ToolParamDefinition( - param_type=param.parameter_type, - description=param.description, - required=param.required, - default=param.default, - items=param.items, - ) - for param in tool_def.parameters - }, + input_schema=tool_def.input_schema, ) return convert_tooldef_to_openai_tool(internal_tool_def) @@ -528,23 +519,15 @@ class StreamingResponseOrchestrator: """Process all tools and emit appropriate streaming events.""" from openai.types.chat import ChatCompletionToolParam - from llama_stack.apis.tools import Tool - from llama_stack.models.llama.datatypes import 
ToolDefinition, ToolParamDefinition + from llama_stack.apis.tools import ToolDef + from llama_stack.models.llama.datatypes import ToolDefinition from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool - def make_openai_tool(tool_name: str, tool: Tool) -> ChatCompletionToolParam: + def make_openai_tool(tool_name: str, tool: ToolDef) -> ChatCompletionToolParam: tool_def = ToolDefinition( tool_name=tool_name, description=tool.description, - parameters={ - param.name: ToolParamDefinition( - param_type=param.parameter_type, - description=param.description, - required=param.required, - default=param.default, - ) - for param in tool.parameters - }, + input_schema=tool.input_schema, ) return convert_tooldef_to_openai_tool(tool_def) @@ -631,16 +614,11 @@ class StreamingResponseOrchestrator: MCPListToolsTool( name=t.name, description=t.description, - input_schema={ + input_schema=t.input_schema + or { "type": "object", - "properties": { - p.name: { - "type": p.parameter_type, - "description": p.description, - } - for p in t.parameters - }, - "required": [p.name for p in t.parameters if p.required], + "properties": {}, + "required": [], }, ) ) diff --git a/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift b/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift index 88c0218b0..8bae3582b 100644 --- a/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift +++ b/llama_stack/providers/inline/ios/inference/LocalInferenceImpl/SystemPrompts.swift @@ -68,9 +68,7 @@ public class FunctionTagCustomToolGenerator { { "name": "{{t.tool_name}}", "description": "{{t.description}}", - "parameters": { - "type": "dict", - "properties": { {{t.parameters}} } + "input_schema": { {{t.input_schema}} } } {{/let}} diff --git a/llama_stack/providers/inline/tool_runtime/rag/memory.py b/llama_stack/providers/inline/tool_runtime/rag/memory.py index bc68f198d..c8499a9b8 100644 --- 
a/llama_stack/providers/inline/tool_runtime/rag/memory.py +++ b/llama_stack/providers/inline/tool_runtime/rag/memory.py @@ -33,7 +33,6 @@ from llama_stack.apis.tools import ( ToolDef, ToolGroup, ToolInvocationResult, - ToolParameter, ToolRuntime, ) from llama_stack.apis.vector_io import ( @@ -301,13 +300,16 @@ class MemoryToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, RAGToolRunti ToolDef( name="knowledge_search", description="Search for information in a database.", - parameters=[ - ToolParameter( - name="query", - description="The query to search for. Can be a natural language sentence or keywords.", - parameter_type="string", - ), - ], + input_schema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The query to search for. Can be a natural language sentence or keywords.", + } + }, + "required": ["query"], + }, ), ] ) diff --git a/llama_stack/providers/remote/inference/vllm/vllm.py b/llama_stack/providers/remote/inference/vllm/vllm.py index 44b3dc3db..2b58b4262 100644 --- a/llama_stack/providers/remote/inference/vllm/vllm.py +++ b/llama_stack/providers/remote/inference/vllm/vllm.py @@ -89,8 +89,7 @@ def _convert_to_vllm_tool_calls_in_response( ToolCall( call_id=call.id, tool_name=call.function.name, - arguments=json.loads(call.function.arguments), - arguments_json=call.function.arguments, + arguments=call.function.arguments, ) for call in tool_calls ] @@ -100,18 +99,6 @@ def _convert_to_vllm_tools_in_request(tools: list[ToolDefinition]) -> list[dict] compat_tools = [] for tool in tools: - properties = {} - compat_required = [] - if tool.parameters: - for tool_key, tool_param in tool.parameters.items(): - properties[tool_key] = {"type": tool_param.param_type} - if tool_param.description: - properties[tool_key]["description"] = tool_param.description - if tool_param.default: - properties[tool_key]["default"] = tool_param.default - if tool_param.required: - compat_required.append(tool_key) - # The tool.tool_name can 
be a str or a BuiltinTool enum. If # it's the latter, convert to a string. tool_name = tool.tool_name @@ -123,10 +110,11 @@ def _convert_to_vllm_tools_in_request(tools: list[ToolDefinition]) -> list[dict] "function": { "name": tool_name, "description": tool.description, - "parameters": { + "parameters": tool.input_schema + or { "type": "object", - "properties": properties, - "required": compat_required, + "properties": {}, + "required": [], }, }, } @@ -161,7 +149,6 @@ def _process_vllm_chat_completion_end_of_stream( for _index, tool_call_buf in sorted(tool_call_bufs.items()): args_str = tool_call_buf.arguments or "{}" try: - args = json.loads(args_str) chunks.append( ChatCompletionResponseStreamChunk( event=ChatCompletionResponseEvent( @@ -170,8 +157,7 @@ def _process_vllm_chat_completion_end_of_stream( tool_call=ToolCall( call_id=tool_call_buf.call_id, tool_name=tool_call_buf.tool_name, - arguments=args, - arguments_json=args_str, + arguments=args_str, ), parse_status=ToolCallParseStatus.succeeded, ), diff --git a/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py b/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py index e40903969..9a98964b7 100644 --- a/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py +++ b/llama_stack/providers/remote/tool_runtime/bing_search/bing_search.py @@ -15,7 +15,6 @@ from llama_stack.apis.tools import ( ToolDef, ToolGroup, ToolInvocationResult, - ToolParameter, ToolRuntime, ) from llama_stack.core.request_headers import NeedsRequestProviderData @@ -57,13 +56,16 @@ class BingSearchToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsReq ToolDef( name="web_search", description="Search the web using Bing Search API", - parameters=[ - ToolParameter( - name="query", - description="The query to search for", - parameter_type="string", - ) - ], + input_schema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The query to search for", + } + 
}, + "required": ["query"], + }, ) ] ) diff --git a/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py b/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py index ba3b910d5..02e5b5c69 100644 --- a/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py +++ b/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py @@ -14,7 +14,6 @@ from llama_stack.apis.tools import ( ToolDef, ToolGroup, ToolInvocationResult, - ToolParameter, ToolRuntime, ) from llama_stack.core.request_headers import NeedsRequestProviderData @@ -56,13 +55,16 @@ class BraveSearchToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsRe ToolDef( name="web_search", description="Search the web for information", - parameters=[ - ToolParameter( - name="query", - description="The query to search for", - parameter_type="string", - ) - ], + input_schema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The query to search for", + } + }, + "required": ["query"], + }, built_in_type=BuiltinTool.brave_search, ) ] diff --git a/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py b/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py index 976ec9c57..ca629fced 100644 --- a/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py +++ b/llama_stack/providers/remote/tool_runtime/tavily_search/tavily_search.py @@ -15,7 +15,6 @@ from llama_stack.apis.tools import ( ToolDef, ToolGroup, ToolInvocationResult, - ToolParameter, ToolRuntime, ) from llama_stack.core.request_headers import NeedsRequestProviderData @@ -56,13 +55,16 @@ class TavilySearchToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsR ToolDef( name="web_search", description="Search the web for information", - parameters=[ - ToolParameter( - name="query", - description="The query to search for", - parameter_type="string", - ) - ], + input_schema={ + "type": "object", + 
"properties": { + "query": { + "type": "string", + "description": "The query to search for", + } + }, + "required": ["query"], + }, ) ] ) diff --git a/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py b/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py index f12a44958..410e34195 100644 --- a/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py +++ b/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py @@ -15,7 +15,6 @@ from llama_stack.apis.tools import ( ToolDef, ToolGroup, ToolInvocationResult, - ToolParameter, ToolRuntime, ) from llama_stack.core.request_headers import NeedsRequestProviderData @@ -57,13 +56,16 @@ class WolframAlphaToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime, NeedsR ToolDef( name="wolfram_alpha", description="Query WolframAlpha for computational knowledge", - parameters=[ - ToolParameter( - name="query", - description="The query to compute", - parameter_type="string", - ) - ], + input_schema={ + "type": "object", + "properties": { + "query": { + "type": "string", + "description": "The query to compute", + } + }, + "required": ["query"], + }, ) ] ) diff --git a/llama_stack/providers/utils/inference/openai_compat.py b/llama_stack/providers/utils/inference/openai_compat.py index da97d7c79..d863eb53a 100644 --- a/llama_stack/providers/utils/inference/openai_compat.py +++ b/llama_stack/providers/utils/inference/openai_compat.py @@ -125,7 +125,6 @@ from llama_stack.models.llama.datatypes import ( StopReason, ToolCall, ToolDefinition, - ToolParamDefinition, ) from llama_stack.providers.utils.inference.prompt_adapter import ( convert_image_content_to_url, @@ -537,18 +536,13 @@ async def convert_message_to_openai_dict(message: Message, download: bool = Fals if isinstance(tool_name, BuiltinTool): tool_name = tool_name.value - # arguments_json can be None, so attempt it first and fall back to arguments - if hasattr(tc, "arguments_json") and tc.arguments_json: - 
arguments = tc.arguments_json - else: - arguments = json.dumps(tc.arguments) result["tool_calls"].append( { "id": tc.call_id, "type": "function", "function": { "name": tool_name, - "arguments": arguments, + "arguments": tc.arguments, }, } ) @@ -641,7 +635,7 @@ async def convert_message_to_openai_dict_new( id=tool.call_id, function=OpenAIFunction( name=(tool.tool_name if not isinstance(tool.tool_name, BuiltinTool) else tool.tool_name.value), - arguments=json.dumps(tool.arguments), + arguments=tool.arguments, # Already a JSON string, don't double-encode ), type="function", ) @@ -684,8 +678,7 @@ def convert_tool_call( valid_tool_call = ToolCall( call_id=tool_call.id, tool_name=tool_call.function.name, - arguments=json.loads(tool_call.function.arguments), - arguments_json=tool_call.function.arguments, + arguments=tool_call.function.arguments, ) except Exception: return UnparseableToolCall( @@ -745,14 +738,8 @@ def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict: ToolDefinition: tool_name: str | BuiltinTool description: Optional[str] - parameters: Optional[Dict[str, ToolParamDefinition]] - - ToolParamDefinition: - param_type: str - description: Optional[str] - required: Optional[bool] - default: Optional[Any] - + input_schema: Optional[Dict[str, Any]] # JSON Schema + output_schema: Optional[Dict[str, Any]] # JSON Schema (not used by OpenAI) OpenAI spec - @@ -761,20 +748,11 @@ def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict: "function": { "name": tool_name, "description": description, - "parameters": { - "type": "object", - "properties": { - param_name: { - "type": param_type, - "description": description, - "default": default, - }, - ... - }, - "required": [param_name, ...], - }, + "parameters": {}, }, } + + NOTE: OpenAI does not support output_schema, so it is dropped here. 
""" out = { "type": "function", @@ -783,37 +761,19 @@ def convert_tooldef_to_openai_tool(tool: ToolDefinition) -> dict: function = out["function"] if isinstance(tool.tool_name, BuiltinTool): - function.update(name=tool.tool_name.value) # TODO(mf): is this sufficient? + function["name"] = tool.tool_name.value else: - function.update(name=tool.tool_name) + function["name"] = tool.tool_name if tool.description: - function.update(description=tool.description) + function["description"] = tool.description - if tool.parameters: - parameters = { - "type": "object", - "properties": {}, - } - properties = parameters["properties"] - required = [] - for param_name, param in tool.parameters.items(): - properties[param_name] = to_openai_param_type(param.param_type) - if param.description: - properties[param_name].update(description=param.description) - if param.default: - properties[param_name].update(default=param.default) - if param.items: - properties[param_name].update(items=param.items) - if param.title: - properties[param_name].update(title=param.title) - if param.required: - required.append(param_name) + if tool.input_schema: + # Pass through the entire JSON Schema as-is + function["parameters"] = tool.input_schema - if required: - parameters.update(required=required) - - function.update(parameters=parameters) + # NOTE: OpenAI does not support output_schema, so we drop it here + # It's stored in LlamaStack for validation and other provider usage return out @@ -874,22 +834,12 @@ def _convert_openai_request_tools(tools: list[dict[str, Any]] | None = None) -> tool_fn = tool.get("function", {}) tool_name = tool_fn.get("name", None) tool_desc = tool_fn.get("description", None) - tool_params = tool_fn.get("parameters", None) - lls_tool_params = {} - if tool_params is not None: - tool_param_properties = tool_params.get("properties", {}) - for tool_param_key, tool_param_value in tool_param_properties.items(): - tool_param_def = ToolParamDefinition( - 
param_type=str(tool_param_value.get("type", None)), - description=tool_param_value.get("description", None), - ) - lls_tool_params[tool_param_key] = tool_param_def lls_tool = ToolDefinition( tool_name=tool_name, description=tool_desc, - parameters=lls_tool_params, + input_schema=tool_params, # Pass through entire JSON Schema ) lls_tools.append(lls_tool) return lls_tools @@ -939,8 +889,7 @@ def _convert_openai_tool_calls( ToolCall( call_id=call.id, tool_name=call.function.name, - arguments=json.loads(call.function.arguments), - arguments_json=call.function.arguments, + arguments=call.function.arguments, ) for call in tool_calls ] @@ -1222,12 +1171,10 @@ async def convert_openai_chat_completion_stream( ) try: - arguments = json.loads(buffer["arguments"]) tool_call = ToolCall( call_id=buffer["call_id"], tool_name=buffer["name"], - arguments=arguments, - arguments_json=buffer["arguments"], + arguments=buffer["arguments"], ) yield ChatCompletionResponseStreamChunk( event=ChatCompletionResponseEvent( @@ -1390,7 +1337,7 @@ class OpenAIChatCompletionToLlamaStackMixin: openai_tool_call = OpenAIChoiceDeltaToolCall( index=0, function=OpenAIChoiceDeltaToolCallFunction( - arguments=tool_call.arguments_json, + arguments=tool_call.arguments, ), ) delta = OpenAIChoiceDelta(tool_calls=[openai_tool_call]) diff --git a/llama_stack/providers/utils/inference/openai_mixin.py b/llama_stack/providers/utils/inference/openai_mixin.py index becec5fb3..3ff7d5cc6 100644 --- a/llama_stack/providers/utils/inference/openai_mixin.py +++ b/llama_stack/providers/utils/inference/openai_mixin.py @@ -286,34 +286,34 @@ class OpenAIMixin(ModelRegistryHelper, NeedsRequestProviderData, ABC): messages = [await _localize_image_url(m) for m in messages] - resp = await self.client.chat.completions.create( - **await prepare_openai_completion_params( - model=await self._get_provider_model_id(model), - messages=messages, - frequency_penalty=frequency_penalty, - function_call=function_call, - functions=functions, 
- logit_bias=logit_bias, - logprobs=logprobs, - max_completion_tokens=max_completion_tokens, - max_tokens=max_tokens, - n=n, - parallel_tool_calls=parallel_tool_calls, - presence_penalty=presence_penalty, - response_format=response_format, - seed=seed, - stop=stop, - stream=stream, - stream_options=stream_options, - temperature=temperature, - tool_choice=tool_choice, - tools=tools, - top_logprobs=top_logprobs, - top_p=top_p, - user=user, - ) + params = await prepare_openai_completion_params( + model=await self._get_provider_model_id(model), + messages=messages, + frequency_penalty=frequency_penalty, + function_call=function_call, + functions=functions, + logit_bias=logit_bias, + logprobs=logprobs, + max_completion_tokens=max_completion_tokens, + max_tokens=max_tokens, + n=n, + parallel_tool_calls=parallel_tool_calls, + presence_penalty=presence_penalty, + response_format=response_format, + seed=seed, + stop=stop, + stream=stream, + stream_options=stream_options, + temperature=temperature, + tool_choice=tool_choice, + tools=tools, + top_logprobs=top_logprobs, + top_p=top_p, + user=user, ) + resp = await self.client.chat.completions.create(**params) + return await self._maybe_overwrite_id(resp, stream) # type: ignore[no-any-return] async def openai_embeddings( diff --git a/llama_stack/providers/utils/tools/mcp.py b/llama_stack/providers/utils/tools/mcp.py index 155f7eff8..48f07cb19 100644 --- a/llama_stack/providers/utils/tools/mcp.py +++ b/llama_stack/providers/utils/tools/mcp.py @@ -20,7 +20,6 @@ from llama_stack.apis.tools import ( ListToolDefsResponse, ToolDef, ToolInvocationResult, - ToolParameter, ) from llama_stack.core.datatypes import AuthenticationRequiredError from llama_stack.log import get_logger @@ -113,24 +112,12 @@ async def list_mcp_tools(endpoint: str, headers: dict[str, str]) -> ListToolDefs async with client_wrapper(endpoint, headers) as session: tools_result = await session.list_tools() for tool in tools_result.tools: - parameters = [] - for 
param_name, param_schema in tool.inputSchema.get("properties", {}).items(): - parameters.append( - ToolParameter( - name=param_name, - parameter_type=param_schema.get("type", "string"), - description=param_schema.get("description", ""), - required="default" not in param_schema, - items=param_schema.get("items", None), - title=param_schema.get("title", None), - default=param_schema.get("default", None), - ) - ) tools.append( ToolDef( name=tool.name, description=tool.description, - parameters=parameters, + input_schema=tool.inputSchema, + output_schema=getattr(tool, "outputSchema", None), metadata={ "endpoint": endpoint, }, diff --git a/tests/common/mcp.py b/tests/common/mcp.py index f65f7c952..357ea4d41 100644 --- a/tests/common/mcp.py +++ b/tests/common/mcp.py @@ -222,16 +222,16 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal def run_server(): try: - logger.info(f"Starting MCP server on port {port}") + logger.debug(f"Starting MCP server on port {port}") server_instance.run() - logger.info(f"MCP server on port {port} has stopped") + logger.debug(f"MCP server on port {port} has stopped") except Exception as e: logger.error(f"MCP server failed to start on port {port}: {e}") raise # Start the server in a new thread server_thread = threading.Thread(target=run_server, daemon=True) - logger.info(f"Starting MCP server thread on port {port}") + logger.debug(f"Starting MCP server thread on port {port}") server_thread.start() # Polling until the server is ready @@ -239,13 +239,13 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal start_time = time.time() server_url = f"http://localhost:{port}/sse" - logger.info(f"Waiting for MCP server to be ready at {server_url}") + logger.debug(f"Waiting for MCP server to be ready at {server_url}") while time.time() - start_time < timeout: try: response = httpx.get(server_url) if response.status_code in [200, 401]: - logger.info(f"MCP server is ready on port {port} 
(status: {response.status_code})") + logger.debug(f"MCP server is ready on port {port} (status: {response.status_code})") break except httpx.RequestError as e: logger.debug(f"Server not ready yet, retrying... ({e})") @@ -261,14 +261,14 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal try: yield {"server_url": server_url} finally: - logger.info(f"Shutting down MCP server on port {port}") + logger.debug(f"Shutting down MCP server on port {port}") server_instance.should_exit = True time.sleep(0.5) # Force shutdown if still running if server_thread.is_alive(): try: - logger.info("Force shutting down server thread") + logger.debug("Force shutting down server thread") if hasattr(server_instance, "servers") and server_instance.servers: for srv in server_instance.servers: srv.close() diff --git a/tests/integration/inference/test_tools_with_schemas.py b/tests/integration/inference/test_tools_with_schemas.py new file mode 100644 index 000000000..b144a5196 --- /dev/null +++ b/tests/integration/inference/test_tools_with_schemas.py @@ -0,0 +1,369 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +""" +Integration tests for inference/chat completion with JSON Schema-based tools. +Tests that tools pass through correctly to various LLM providers. 
+""" + +import json + +import pytest + +from llama_stack import LlamaStackAsLibraryClient +from llama_stack.models.llama.datatypes import ToolDefinition +from tests.common.mcp import make_mcp_server + +AUTH_TOKEN = "test-token" + + +class TestChatCompletionWithTools: + """Test chat completion with tools that have complex schemas.""" + + def test_simple_tool_call(self, llama_stack_client, text_model_id): + """Test basic tool calling with simple input schema.""" + tools = [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather for a location", + "parameters": { + "type": "object", + "properties": {"location": {"type": "string", "description": "City name"}}, + "required": ["location"], + }, + }, + } + ] + + response = llama_stack_client.chat.completions.create( + model=text_model_id, + messages=[{"role": "user", "content": "What's the weather in San Francisco?"}], + tools=tools, + ) + + assert response is not None + + def test_tool_with_complex_schema(self, llama_stack_client, text_model_id): + """Test tool calling with complex schema including $ref and $defs.""" + tools = [ + { + "type": "function", + "function": { + "name": "book_flight", + "description": "Book a flight", + "parameters": { + "type": "object", + "properties": { + "flight": {"$ref": "#/$defs/FlightInfo"}, + "passenger": {"$ref": "#/$defs/Passenger"}, + }, + "required": ["flight", "passenger"], + "$defs": { + "FlightInfo": { + "type": "object", + "properties": { + "from": {"type": "string"}, + "to": {"type": "string"}, + "date": {"type": "string", "format": "date"}, + }, + }, + "Passenger": { + "type": "object", + "properties": {"name": {"type": "string"}, "age": {"type": "integer"}}, + }, + }, + }, + }, + } + ] + + response = llama_stack_client.chat.completions.create( + model=text_model_id, + messages=[{"role": "user", "content": "Book a flight from SFO to JFK for John Doe"}], + tools=tools, + ) + + # The key test: No errors during schema processing + # The 
LLM received a valid, complete schema with $ref/$defs + assert response is not None + + +class TestOpenAICompatibility: + """Test OpenAI-compatible endpoints with new schema format.""" + + def test_openai_chat_completion_with_tools(self, compat_client, text_model_id): + """Test OpenAI-compatible chat completion with tools.""" + from openai import OpenAI + + if not isinstance(compat_client, OpenAI): + pytest.skip("OpenAI client required") + + tools = [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather information", + "parameters": { + "type": "object", + "properties": {"location": {"type": "string", "description": "City name"}}, + "required": ["location"], + }, + }, + } + ] + + response = compat_client.chat.completions.create( + model=text_model_id, messages=[{"role": "user", "content": "What's the weather in Tokyo?"}], tools=tools + ) + + assert response is not None + assert response.choices is not None + + def test_openai_format_preserves_complex_schemas(self, compat_client, text_model_id): + """Test that complex schemas work through OpenAI-compatible API.""" + from openai import OpenAI + + if not isinstance(compat_client, OpenAI): + pytest.skip("OpenAI client required") + + tools = [ + { + "type": "function", + "function": { + "name": "process_data", + "description": "Process structured data", + "parameters": { + "type": "object", + "properties": {"data": {"$ref": "#/$defs/DataObject"}}, + "$defs": { + "DataObject": { + "type": "object", + "properties": {"values": {"type": "array", "items": {"type": "number"}}}, + } + }, + }, + }, + } + ] + + response = compat_client.chat.completions.create( + model=text_model_id, messages=[{"role": "user", "content": "Process this data"}], tools=tools + ) + + assert response is not None + + +class TestMCPToolsInChatCompletion: + """Test using MCP tools in chat completion.""" + + @pytest.fixture + def mcp_with_schemas(self): + """MCP server for chat completion tests.""" + from 
mcp.server.fastmcp import Context + + async def calculate(x: float, y: float, operation: str, ctx: Context) -> float: + ops = {"add": x + y, "sub": x - y, "mul": x * y, "div": x / y if y != 0 else None} + return ops.get(operation, 0) + + with make_mcp_server(required_auth_token=AUTH_TOKEN, tools={"calculate": calculate}) as server: + yield server + + def test_mcp_tools_in_inference(self, llama_stack_client, text_model_id, mcp_with_schemas): + """Test that MCP tools can be used in inference.""" + if not isinstance(llama_stack_client, LlamaStackAsLibraryClient): + pytest.skip("Library client required for local MCP server") + + test_toolgroup_id = "mcp::calc" + uri = mcp_with_schemas["server_url"] + + try: + llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id) + except Exception: + pass + + llama_stack_client.toolgroups.register( + toolgroup_id=test_toolgroup_id, + provider_id="model-context-protocol", + mcp_endpoint=dict(uri=uri), + ) + + provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}} + auth_headers = { + "X-LlamaStack-Provider-Data": json.dumps(provider_data), + } + + # Get the tools from MCP + tools_response = llama_stack_client.tool_runtime.list_tools( + tool_group_id=test_toolgroup_id, + extra_headers=auth_headers, + ) + + # Convert to OpenAI format for inference + tools = [] + for tool in tools_response: + tools.append( + { + "type": "function", + "function": { + "name": tool.name, + "description": tool.description, + "parameters": tool.input_schema or {}, + }, + } + ) + + # Use in chat completion + response = llama_stack_client.chat.completions.create( + model=text_model_id, + messages=[{"role": "user", "content": "Calculate 5 + 3"}], + tools=tools, + ) + + # Schema should have been passed through correctly + assert response is not None + + +class TestProviderSpecificBehavior: + """Test provider-specific handling of schemas.""" + + def test_openai_provider_drops_output_schema(self, llama_stack_client, 
text_model_id): + """Test that OpenAI provider doesn't send output_schema (API limitation).""" + # This is more of a documentation test + # OpenAI API doesn't support output schemas, so we drop them + + _tool = ToolDefinition( + tool_name="test", + input_schema={"type": "object", "properties": {"x": {"type": "string"}}}, + output_schema={"type": "object", "properties": {"y": {"type": "number"}}}, + ) + + # When this tool is sent to OpenAI provider, output_schema is dropped + # But input_schema is preserved + # This test documents the expected behavior + + # We can't easily test this without mocking, but the unit tests cover it + pass + + def test_gemini_array_support(self): + """Test that Gemini receives array schemas correctly (issue from commit 65f7b81e).""" + # This was the original bug that led to adding 'items' field + # Now with full JSON Schema pass-through, arrays should work + + tool = ToolDefinition( + tool_name="tag_processor", + input_schema={ + "type": "object", + "properties": {"tags": {"type": "array", "items": {"type": "string"}, "description": "List of tags"}}, + }, + ) + + # With new approach, the complete schema with items is preserved + assert tool.input_schema["properties"]["tags"]["type"] == "array" + assert tool.input_schema["properties"]["tags"]["items"]["type"] == "string" + + +class TestStreamingWithTools: + """Test streaming chat completion with tools.""" + + def test_streaming_tool_calls(self, llama_stack_client, text_model_id): + """Test that tool schemas work correctly in streaming mode.""" + tools = [ + { + "type": "function", + "function": { + "name": "get_time", + "description": "Get current time", + "parameters": {"type": "object", "properties": {"timezone": {"type": "string"}}}, + }, + } + ] + + response_stream = llama_stack_client.chat.completions.create( + model=text_model_id, + messages=[{"role": "user", "content": "What time is it in UTC?"}], + tools=tools, + stream=True, + ) + + # Should be able to iterate through stream + 
chunks = [] + for chunk in response_stream: + chunks.append(chunk) + + # Should have received some chunks + assert len(chunks) >= 0 + + +class TestEdgeCases: + """Test edge cases in inference with tools.""" + + def test_tool_without_schema(self, llama_stack_client, text_model_id): + """Test tool with no input_schema.""" + tools = [ + { + "type": "function", + "function": { + "name": "no_args_tool", + "description": "Tool with no arguments", + "parameters": {"type": "object", "properties": {}}, + }, + } + ] + + response = llama_stack_client.chat.completions.create( + model=text_model_id, + messages=[{"role": "user", "content": "Call the no args tool"}], + tools=tools, + ) + + assert response is not None + + def test_multiple_tools_with_different_schemas(self, llama_stack_client, text_model_id): + """Test multiple tools with different schema complexities.""" + tools = [ + { + "type": "function", + "function": { + "name": "simple", + "parameters": {"type": "object", "properties": {"x": {"type": "string"}}}, + }, + }, + { + "type": "function", + "function": { + "name": "complex", + "parameters": { + "type": "object", + "properties": {"data": {"$ref": "#/$defs/Complex"}}, + "$defs": { + "Complex": { + "type": "object", + "properties": {"nested": {"type": "array", "items": {"type": "number"}}}, + } + }, + }, + }, + }, + { + "type": "function", + "function": { + "name": "with_output", + "parameters": {"type": "object", "properties": {"input": {"type": "string"}}}, + }, + }, + ] + + response = llama_stack_client.chat.completions.create( + model=text_model_id, + messages=[{"role": "user", "content": "Use one of the available tools"}], + tools=tools, + ) + + # All tools should have been processed without errors + assert response is not None diff --git a/tests/integration/recordings/responses/00f70ca112de.json b/tests/integration/recordings/responses/00f70ca112de.json index 1036976c3..d6fb13295 100644 --- a/tests/integration/recordings/responses/00f70ca112de.json +++ 
b/tests/integration/recordings/responses/00f70ca112de.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-282", + "id": "chatcmpl-281", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245124, + "created": 1759437798, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/0396786db779.json b/tests/integration/recordings/responses/0396786db779.json new file mode 100644 index 000000000..e2d40c100 --- /dev/null +++ b/tests/integration/recordings/responses/0396786db779.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant Always respond with tool calls no matter what. <|eot_id|><|start_header_id|>user<|end_header_id|>\n\nGet the boiling point of polyjuice with a tool call.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.228595Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.272966Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + 
"eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.315637Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.356564Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.397939Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.438829Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.479679Z", + "done": false, + "done_reason": null, + "total_duration": 
null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.520682Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.56207Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.603054Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.644749Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": 
"llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.685399Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.7267Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.77062Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.813947Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.854591Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, 
+ "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.896278Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.937449Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:57.979031Z", + "done": true, + "done_reason": "stop", + "total_duration": 944600833, + "load_duration": 83227667, + "prompt_eval_count": 369, + "prompt_eval_duration": 109699916, + "eval_count": 19, + "eval_duration": 751096500, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/044dcd8fdeb1.json b/tests/integration/recordings/responses/044dcd8fdeb1.json index 7e8b92202..b85900d6a 100644 --- a/tests/integration/recordings/responses/044dcd8fdeb1.json +++ b/tests/integration/recordings/responses/044dcd8fdeb1.json @@ -28,7 +28,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -43,7 +43,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, 
@@ -54,7 +54,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -69,7 +69,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -80,7 +80,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -95,7 +95,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -106,7 +106,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -121,7 +121,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -132,7 +132,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -147,7 +147,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -158,11 +158,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { - "content": " us", + "content": " me", "function_call": null, "refusal": null, "role": "assistant", @@ -173,7 +173,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -184,7 +184,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -199,7 +199,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -210,7 +210,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -225,7 +225,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -236,7 +236,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -251,7 +251,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -262,7 +262,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -277,7 +277,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -288,7 +288,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -303,7 +303,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -314,7 +314,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { 
"delta": { @@ -329,7 +329,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -340,7 +340,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -355,7 +355,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -366,11 +366,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { - "content": " we", + "content": " I", "function_call": null, "refusal": null, "role": "assistant", @@ -381,7 +381,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -392,7 +392,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -407,7 +407,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -418,7 +418,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -433,7 +433,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -444,7 +444,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -459,7 +459,7 @@ 
"logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -470,7 +470,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -485,7 +485,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437810, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -496,7 +496,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -511,7 +511,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437811, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -522,7 +522,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-130", "choices": [ { "delta": { @@ -537,7 +537,7 @@ "logprobs": null } ], - "created": 1759427013, + "created": 1759437811, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/04cb9de29e06.json b/tests/integration/recordings/responses/04cb9de29e06.json new file mode 100644 index 000000000..0fdc6f8b9 --- /dev/null +++ b/tests/integration/recordings/responses/04cb9de29e06.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. 
You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use the tool `get_boiling_point` to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.682181Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + 
"prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.728326Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.775162Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.820267Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.864362Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.906797Z", + 
"done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.950158Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:08.992796Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.034691Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.07709Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": 
"ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.119534Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.161661Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.204749Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.247334Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.29011Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + 
"eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.331776Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.374076Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.416672Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:09.458519Z", + "done": true, + "done_reason": "stop", + "total_duration": 1437962792, + "load_duration": 129009042, + "prompt_eval_count": 379, + "prompt_eval_duration": 530416042, + "eval_count": 19, + "eval_duration": 777491375, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/05e3ebc68306.json b/tests/integration/recordings/responses/05e3ebc68306.json index b7d0a6e8e..665ea3012 100644 --- 
a/tests/integration/recordings/responses/05e3ebc68306.json +++ b/tests/integration/recordings/responses/05e3ebc68306.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-447", + "id": "chatcmpl-249", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282456, + "created": 1759441157, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/08a21ab74e0a.json b/tests/integration/recordings/responses/08a21ab74e0a.json new file mode 100644 index 000000000..3645efabd --- /dev/null +++ b/tests/integration/recordings/responses/08a21ab74e0a.json @@ -0,0 +1,542 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant." + }, + { + "role": "user", + "content": "Say hi to the world. Use tools to do so." + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_qvp9u80l", + "type": "function", + "function": { + "name": "greet_everyone", + "arguments": "{\"url\":\"world\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_qvp9u80l", + "content": [ + { + "type": "text", + "text": "Hello, world!" 
+ } + ] + } + ], + "max_tokens": 0, + "stream": true, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "greet_everyone", + "parameters": { + "properties": { + "url": { + "title": "Url", + "type": "string" + } + }, + "required": [ + "url" + ], + "title": "greet_everyoneArguments", + "type": "object" + } + } + }, + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ", + "parameters": { + "properties": { + "liquid_name": { + "title": "Liquid Name", + "type": "string" + }, + "celsius": { + "default": true, + "title": "Celsius", + "type": "boolean" + } + }, + "required": [ + "liquid_name" + ], + "title": "get_boiling_pointArguments", + "type": "object" + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "<|python_tag|>", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "{\"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "message", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "Hello", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": 
null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": ",", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": " world", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "!\",", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": " 
\"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "type", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + 
"__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "hello", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "_world", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "\"}", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-714", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437846, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } 
+ } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/0989d0d62a86.json b/tests/integration/recordings/responses/0989d0d62a86.json new file mode 100644 index 000000000..0c2a321d9 --- /dev/null +++ b/tests/integration/recordings/responses/0989d0d62a86.json @@ -0,0 +1,138 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant." + }, + { + "role": "user", + "content": "Say hi to the world. Use tools to do so." + } + ], + "max_tokens": 0, + "stream": true, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "greet_everyone", + "parameters": { + "properties": { + "url": { + "title": "Url", + "type": "string" + } + }, + "required": [ + "url" + ], + "title": "greet_everyoneArguments", + "type": "object" + } + } + }, + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ", + "parameters": { + "properties": { + "liquid_name": { + "title": "Liquid Name", + "type": "string" + }, + "celsius": { + "default": true, + "title": "Celsius", + "type": "boolean" + } + }, + "required": [ + "liquid_name" + ], + "title": "get_boiling_pointArguments", + "type": "object" + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-359", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": [ + { + "index": 0, + "id": "call_qvp9u80l", + "function": { + "arguments": "{\"url\":\"world\"}", + "name": "greet_everyone" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-359", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759437845, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/0a29c4085705.json b/tests/integration/recordings/responses/0a29c4085705.json new file mode 100644 index 000000000..b4e8505d4 --- /dev/null +++ b/tests/integration/recordings/responses/0a29c4085705.json @@ -0,0 +1,124 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-865", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_tipirynt", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429354, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-865", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759429354, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } 
+ } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/0e8f2b001dd9.json b/tests/integration/recordings/responses/0e8f2b001dd9.json index 6bcdfdfed..1067ed88e 100644 --- a/tests/integration/recordings/responses/0e8f2b001dd9.json +++ b/tests/integration/recordings/responses/0e8f2b001dd9.json @@ -20,14 +20,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-161", + "id": "chatcmpl-870", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "The answer is Saturn.", + "content": "The planet Saturn has rings.", "refusal": null, "role": "assistant", "annotations": null, @@ -37,15 +37,15 @@ } } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 6, + "completion_tokens": 7, "prompt_tokens": 39, - "total_tokens": 45, + "total_tokens": 46, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/0fad19b9d308.json b/tests/integration/recordings/responses/0fad19b9d308.json new file mode 100644 index 000000000..486fd0b8f --- /dev/null +++ b/tests/integration/recordings/responses/0fad19b9d308.json @@ -0,0 +1,93 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "What time is it in UTC?" 
+ } + ], + "stream": true, + "tools": [ + { + "type": "function", + "function": { + "name": "get_time", + "description": "Get current time", + "parameters": { + "type": "object", + "properties": { + "timezone": { + "type": "string" + } + } + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-567", + "choices": [ + { + "delta": { + "content": "{\"name\":\"get_time\",\"parameters\\\":{\\\"timezone\\\":\\\"UTC\\\"}}", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437807, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-567", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437807, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/178538be60e2.json b/tests/integration/recordings/responses/178538be60e2.json index 41cb76164..aaba1cbd2 100644 --- a/tests/integration/recordings/responses/178538be60e2.json +++ b/tests/integration/recordings/responses/178538be60e2.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-261", + "id": "chatcmpl-239", "choices": [ { "finish_reason": "stop", @@ 
-38,7 +38,7 @@ } } ], - "created": 1759245125, + "created": 1759437799, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/1a4da7c94fde.json b/tests/integration/recordings/responses/1a4da7c94fde.json index ca24f20d2..0f5734bd9 100644 --- a/tests/integration/recordings/responses/1a4da7c94fde.json +++ b/tests/integration/recordings/responses/1a4da7c94fde.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-478", + "id": "chatcmpl-466", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282396, + "created": 1759373692, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/1acd433c05d4.json b/tests/integration/recordings/responses/1acd433c05d4.json new file mode 100644 index 000000000..5ab638216 --- /dev/null +++ b/tests/integration/recordings/responses/1acd433c05d4.json @@ -0,0 +1,1787 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"book_flight\",\n \"description\": \"\n Book a flight with passenger and payment information.\n\n This tool uses JSON Schema $ref and $defs for type reuse.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"flight\", \"passengers\", \"payment\"],\n \"properties\": {\n \"flight\": {\n \"type\": \"object\",\n \"description\": \"\"\n },\n \"passengers\": {\n \"type\": \"array\",\n \"description\": \"\"\n },\n \"payment\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"process_order\",\n \"description\": \"\n Process an order with nested address information.\n\n Uses nested objects and $ref.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"order_data\"],\n \"properties\": {\n \"order_data\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"flexible_contact\",\n \"description\": \"\n Accept flexible contact (email or phone).\n\n Uses anyOf schema.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"contact_info\"],\n \"properties\": {\n \"contact_info\": {\n \"type\": \"string\",\n \"description\": \"\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant that can process orders and book flights.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nProcess an order with 2 widgets going to 123 Main St, San Francisco<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[process_order(order_data={order_id=1, customer_name=\"John Doe\", address={street=\"123 Main St\", city=\"San Francisco\"}})]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n{\n \"order_id\": 
\"ORD789\",\n \"status\": \"processing\",\n \"data\": {\n \"order_id\": 1,\n \"customer_name\": \"John Doe\",\n \"address\": {\n \"street\": \"123 Main St\",\n \"city\": \"San Francisco\"\n }\n }\n}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[book_flight(flight={flight_number=\"AA101\", departure=\"New York\", arrival=\"Los Angeles\", passengers=[{name=\"John Doe\", email=\"johndoe@example.com\"}], payment={method=\"credit_card\", card_number=\"1234567890123456\"}})]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\nError executing tool book_flight: 2 validation errors for book_flightArguments\npassengers\n Field required [type=missing, input_value={'session_id': '7ee11e0c-...': '1234567890123456'}}}, input_type=dict]\n For further information visit https://errors.pydantic.dev/2.11/v/missing\npayment\n Field required [type=missing, input_value={'session_id': '7ee11e0c-...': '1234567890123456'}}}, input_type=dict]\n For further information visit https://errors.pydantic.dev/2.11/v/missing<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:57.713027Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:57.75795Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + 
"eval_duration": null, + "response": "process", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:57.802534Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:57.847491Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:57.893508Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_data", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:57.939651Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "={", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:57.984535Z", + "done": false, + "done_reason": null, + 
"total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.028599Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_id", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.073398Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.117854Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "1", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.161781Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": 
"llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.206772Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " customer", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.25349Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.298963Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=\"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.344779Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "John", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.389936Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Doe", + 
"thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.437317Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.48249Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " address", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.529399Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "={", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.576296Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "street", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.620844Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + 
"prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=\"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.66531Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "123", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.709756Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Main", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.754076Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " St", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.797921Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-01T22:58:58.842653Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " city", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.887035Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=\"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.930907Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "San", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:58.975Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Francisco", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.019589Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\"}}", + "thinking": null, + "context": null + } + }, + { + 
"__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.064177Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.109025Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "{\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.153911Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.197854Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.244999Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + 
"eval_count": null, + "eval_duration": null, + "response": "order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.291864Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_id", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.337792Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.382092Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.426921Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ORD", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.471944Z", + "done": false, + "done_reason": null, + 
"total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "789", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.516816Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.560907Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.604707Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.649026Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "status", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": 
"llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.693453Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.738699Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.783077Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "processing", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.82803Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.873239Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + 
"thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.918932Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:58:59.964192Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "data", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.009316Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.055147Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " {\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.100799Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + 
"prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.146772Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.193478Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.240171Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_id", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.287971Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-01T22:59:00.333459Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.37832Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "1", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.423158Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ",\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.468091Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.51265Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": 
"ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.557925Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "customer", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.60244Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.647203Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.692055Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.737131Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": 
null, + "eval_duration": null, + "response": "John", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.781687Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Doe", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.828788Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.874402Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.922888Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:00.976299Z", + "done": false, + "done_reason": null, + 
"total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "address", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.024037Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.071372Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " {\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.11661Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.161193Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": 
"llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.205589Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "street", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.252464Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.298844Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.34424Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "123", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.388967Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Main", + 
"thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.433822Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " St", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.478032Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.523181Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.567586Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.611862Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + 
"prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "city", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.655861Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.699861Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.74517Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "San", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.789381Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Francisco", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-01T22:59:01.833655Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\"\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.878329Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.923823Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " }\n", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:01.968755Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:02.012573Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " }\n", + "thinking": null, + "context": null + } + }, + { + 
"__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:02.056287Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "}", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T22:59:02.100074Z", + "done": true, + "done_reason": "stop", + "total_duration": 4820442250, + "load_duration": 79949333, + "prompt_eval_count": 866, + "prompt_eval_duration": 352139708, + "eval_count": 98, + "eval_duration": 4387637875, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/1b939935d483.json b/tests/integration/recordings/responses/1b939935d483.json new file mode 100644 index 000000000..1eed51400 --- /dev/null +++ b/tests/integration/recordings/responses/1b939935d483.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use one of the provided functions/tools to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:01.957108Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + 
"created_at": "2025-10-02T02:55:01.998746Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.040281Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.081567Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.122945Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.16406Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null 
+ } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.205051Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.246393Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.288195Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.331557Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.373397Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": 
null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.414856Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:02.456059Z", + "done": true, + "done_reason": "stop", + "total_duration": 669686292, + "load_duration": 96788459, + "prompt_eval_count": 408, + "prompt_eval_duration": 72865250, + "eval_count": 13, + "eval_duration": 499470042, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/21cf30c6181e.json b/tests/integration/recordings/responses/21cf30c6181e.json new file mode 100644 index 000000000..e982edb47 --- /dev/null +++ b/tests/integration/recordings/responses/21cf30c6181e.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant Always respond with tool calls no matter what. " + }, + { + "role": "user", + "content": "Get the boiling point of polyjuice with a tool call." 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "str", + "description": "The name of the liquid" + }, + "celcius": { + "type": "bool", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-922", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_34cofb9p", + "function": { + "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759425219, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-922", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759425219, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/239f4768f5aa.json b/tests/integration/recordings/responses/239f4768f5aa.json index ce540db3f..38f483090 100644 --- a/tests/integration/recordings/responses/239f4768f5aa.json +++ b/tests/integration/recordings/responses/239f4768f5aa.json @@ -53,14 +53,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-433", + "id": "chatcmpl-497", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "{\"first_name\": \"Michael\", \"last_name\": \"Jordan\", \"year_of_birth\": 1963}\n\n \t\t\t\t\t\t\t\t\t\t\t \t\t ", + "content": "{\"first_name\": \"Michael\", \"last_name\": \"Jordan\", \"year_of_birth\": 1963}", "refusal": null, "role": "assistant", "annotations": null, @@ -70,15 +70,15 @@ } } ], - "created": 1758979490, + "created": 1759376618, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 31, + "completion_tokens": 26, "prompt_tokens": 60, - "total_tokens": 91, + "total_tokens": 86, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/23ad3b9e003e.json b/tests/integration/recordings/responses/23ad3b9e003e.json new file mode 100644 index 000000000..50c46c5b1 --- /dev/null +++ b/tests/integration/recordings/responses/23ad3b9e003e.json @@ -0,0 +1,57 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama-guard3:1b", + "messages": [ + { + "role": "user", + "content": "Task: Check if there is unsafe content in 'Tool' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: 
Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: What is the boiling point of the liquid polyjuice in celsius?\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\n\n\nProvide your safety assessment for ONLY THE LAST Tool message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories." + } + ], + "stream": false, + "temperature": 0.0 + }, + "endpoint": "/v1/chat/completions", + "model": "llama-guard3:1b" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-651", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "safe", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1759437831, + "model": "llama-guard3:1b", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 2, + "prompt_tokens": 420, + "total_tokens": 422, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/2717f0003e0a.json b/tests/integration/recordings/responses/2717f0003e0a.json index 69d5d7c64..56a9333c6 100644 --- a/tests/integration/recordings/responses/2717f0003e0a.json +++ b/tests/integration/recordings/responses/2717f0003e0a.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-588", + "id": "chatcmpl-531", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245128, + "created": 1759437800, "model": "llama-guard3:1b", "object": 
"chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/278d5568fa92.json b/tests/integration/recordings/responses/278d5568fa92.json new file mode 100644 index 000000000..85866aefa --- /dev/null +++ b/tests/integration/recordings/responses/278d5568fa92.json @@ -0,0 +1,388 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_d1i5ou69", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_d1i5ou69", + "content": "-212" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, 
+ "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": 
null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": 
"chatcmpl-704", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": "212", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759441676, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": 
true + } +} diff --git a/tests/integration/recordings/responses/2d187a11704c.json b/tests/integration/recordings/responses/2d187a11704c.json index ecce0ec80..0c12271fd 100644 --- a/tests/integration/recordings/responses/2d187a11704c.json +++ b/tests/integration/recordings/responses/2d187a11704c.json @@ -22,7 +22,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:11.444139198Z", + "created_at": "2025-10-02T02:55:03.175181Z", "done": false, "done_reason": null, "total_duration": null, @@ -40,7 +40,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:11.631417419Z", + "created_at": "2025-10-02T02:55:03.21666Z", "done": false, "done_reason": null, "total_duration": null, @@ -58,7 +58,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:11.837785952Z", + "created_at": "2025-10-02T02:55:03.258841Z", "done": false, "done_reason": null, "total_duration": null, @@ -76,7 +76,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:12.035361735Z", + "created_at": "2025-10-02T02:55:03.299188Z", "done": false, "done_reason": null, "total_duration": null, @@ -94,7 +94,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:12.231459021Z", + "created_at": "2025-10-02T02:55:03.339415Z", "done": false, "done_reason": null, "total_duration": null, @@ -112,7 +112,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:12.437587336Z", + "created_at": "2025-10-02T02:55:03.379794Z", "done": false, "done_reason": null, "total_duration": null, @@ -130,7 +130,7 @@ "__type__": 
"ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:12.645814233Z", + "created_at": "2025-10-02T02:55:03.420354Z", "done": false, "done_reason": null, "total_duration": null, @@ -148,7 +148,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:12.857399802Z", + "created_at": "2025-10-02T02:55:03.460933Z", "done": false, "done_reason": null, "total_duration": null, @@ -166,7 +166,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:13.069748955Z", + "created_at": "2025-10-02T02:55:03.501777Z", "done": false, "done_reason": null, "total_duration": null, @@ -184,7 +184,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:13.275446646Z", + "created_at": "2025-10-02T02:55:03.542402Z", "done": false, "done_reason": null, "total_duration": null, @@ -202,7 +202,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:13.472121232Z", + "created_at": "2025-10-02T02:55:03.582816Z", "done": false, "done_reason": null, "total_duration": null, @@ -220,7 +220,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:13.665744046Z", + "created_at": "2025-10-02T02:55:03.623108Z", "done": false, "done_reason": null, "total_duration": null, @@ -238,7 +238,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:13.861581737Z", + "created_at": "2025-10-02T02:55:03.663532Z", "done": false, "done_reason": null, "total_duration": null, @@ -256,7 +256,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": 
"llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:14.057543582Z", + "created_at": "2025-10-02T02:55:03.704651Z", "done": false, "done_reason": null, "total_duration": null, @@ -274,7 +274,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:14.250235864Z", + "created_at": "2025-10-02T02:55:03.746321Z", "done": false, "done_reason": null, "total_duration": null, @@ -292,7 +292,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:14.440950519Z", + "created_at": "2025-10-02T02:55:03.787213Z", "done": false, "done_reason": null, "total_duration": null, @@ -310,7 +310,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:14.633159237Z", + "created_at": "2025-10-02T02:55:03.829153Z", "done": false, "done_reason": null, "total_duration": null, @@ -328,7 +328,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:14.824645544Z", + "created_at": "2025-10-02T02:55:03.869545Z", "done": false, "done_reason": null, "total_duration": null, @@ -346,7 +346,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:15.015421713Z", + "created_at": "2025-10-02T02:55:03.909839Z", "done": false, "done_reason": null, "total_duration": null, @@ -364,7 +364,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:15.21010827Z", + "created_at": "2025-10-02T02:55:03.950296Z", "done": false, "done_reason": null, "total_duration": null, @@ -382,7 +382,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": 
"2025-10-01T01:35:15.406911964Z", + "created_at": "2025-10-02T02:55:03.990725Z", "done": false, "done_reason": null, "total_duration": null, @@ -400,7 +400,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:15.599086606Z", + "created_at": "2025-10-02T02:55:04.031037Z", "done": false, "done_reason": null, "total_duration": null, @@ -418,7 +418,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:15.789596143Z", + "created_at": "2025-10-02T02:55:04.071398Z", "done": false, "done_reason": null, "total_duration": null, @@ -436,7 +436,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:15.981551476Z", + "created_at": "2025-10-02T02:55:04.111908Z", "done": false, "done_reason": null, "total_duration": null, @@ -454,7 +454,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:16.170823008Z", + "created_at": "2025-10-02T02:55:04.153461Z", "done": false, "done_reason": null, "total_duration": null, @@ -472,7 +472,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:16.361099362Z", + "created_at": "2025-10-02T02:55:04.195941Z", "done": false, "done_reason": null, "total_duration": null, @@ -490,7 +490,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:16.554187248Z", + "created_at": "2025-10-02T02:55:04.236433Z", "done": false, "done_reason": null, "total_duration": null, @@ -508,7 +508,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:16.746364193Z", + "created_at": 
"2025-10-02T02:55:04.27718Z", "done": false, "done_reason": null, "total_duration": null, @@ -526,7 +526,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:16.937784556Z", + "created_at": "2025-10-02T02:55:04.317743Z", "done": false, "done_reason": null, "total_duration": null, @@ -544,7 +544,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:17.130739694Z", + "created_at": "2025-10-02T02:55:04.358602Z", "done": false, "done_reason": null, "total_duration": null, @@ -562,7 +562,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:17.324485154Z", + "created_at": "2025-10-02T02:55:04.399212Z", "done": false, "done_reason": null, "total_duration": null, @@ -580,7 +580,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:17.513221988Z", + "created_at": "2025-10-02T02:55:04.439733Z", "done": false, "done_reason": null, "total_duration": null, @@ -598,7 +598,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:17.704588587Z", + "created_at": "2025-10-02T02:55:04.480639Z", "done": false, "done_reason": null, "total_duration": null, @@ -616,7 +616,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:17.89491876Z", + "created_at": "2025-10-02T02:55:04.521251Z", "done": false, "done_reason": null, "total_duration": null, @@ -634,7 +634,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:18.085415685Z", + "created_at": "2025-10-02T02:55:04.56195Z", "done": false, "done_reason": null, 
"total_duration": null, @@ -652,7 +652,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:18.291123534Z", + "created_at": "2025-10-02T02:55:04.60257Z", "done": false, "done_reason": null, "total_duration": null, @@ -670,7 +670,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:18.481091772Z", + "created_at": "2025-10-02T02:55:04.643071Z", "done": false, "done_reason": null, "total_duration": null, @@ -688,7 +688,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:18.669330853Z", + "created_at": "2025-10-02T02:55:04.684195Z", "done": false, "done_reason": null, "total_duration": null, @@ -706,7 +706,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:18.862203802Z", + "created_at": "2025-10-02T02:55:04.725008Z", "done": false, "done_reason": null, "total_duration": null, @@ -724,7 +724,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:19.050586441Z", + "created_at": "2025-10-02T02:55:04.766299Z", "done": false, "done_reason": null, "total_duration": null, @@ -742,7 +742,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:19.243400941Z", + "created_at": "2025-10-02T02:55:04.807076Z", "done": false, "done_reason": null, "total_duration": null, @@ -760,7 +760,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:19.438492404Z", + "created_at": "2025-10-02T02:55:04.848963Z", "done": false, "done_reason": null, "total_duration": null, @@ -778,7 +778,7 @@ "__type__": 
"ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:19.625091169Z", + "created_at": "2025-10-02T02:55:04.889928Z", "done": false, "done_reason": null, "total_duration": null, @@ -796,7 +796,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:19.817882725Z", + "created_at": "2025-10-02T02:55:04.934326Z", "done": false, "done_reason": null, "total_duration": null, @@ -814,7 +814,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:20.006228518Z", + "created_at": "2025-10-02T02:55:04.977276Z", "done": false, "done_reason": null, "total_duration": null, @@ -832,7 +832,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:20.195451511Z", + "created_at": "2025-10-02T02:55:05.020601Z", "done": false, "done_reason": null, "total_duration": null, @@ -850,7 +850,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:20.38583856Z", + "created_at": "2025-10-02T02:55:05.063018Z", "done": false, "done_reason": null, "total_duration": null, @@ -868,7 +868,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:20.574736342Z", + "created_at": "2025-10-02T02:55:05.104224Z", "done": false, "done_reason": null, "total_duration": null, @@ -886,7 +886,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:20.770260046Z", + "created_at": "2025-10-02T02:55:05.144777Z", "done": false, "done_reason": null, "total_duration": null, @@ -904,7 +904,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": 
"llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:20.961391185Z", + "created_at": "2025-10-02T02:55:05.184974Z", "done": false, "done_reason": null, "total_duration": null, @@ -922,7 +922,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:21.15136915Z", + "created_at": "2025-10-02T02:55:05.225424Z", "done": false, "done_reason": null, "total_duration": null, @@ -940,7 +940,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:21.34012064Z", + "created_at": "2025-10-02T02:55:05.2659Z", "done": false, "done_reason": null, "total_duration": null, @@ -958,7 +958,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:21.530394237Z", + "created_at": "2025-10-02T02:55:05.306482Z", "done": false, "done_reason": null, "total_duration": null, @@ -976,7 +976,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:21.721043618Z", + "created_at": "2025-10-02T02:55:05.346838Z", "done": false, "done_reason": null, "total_duration": null, @@ -994,7 +994,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:21.911611623Z", + "created_at": "2025-10-02T02:55:05.387059Z", "done": false, "done_reason": null, "total_duration": null, @@ -1012,7 +1012,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:22.100940877Z", + "created_at": "2025-10-02T02:55:05.427541Z", "done": false, "done_reason": null, "total_duration": null, @@ -1030,7 +1030,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": 
"2025-10-01T01:35:22.289910353Z", + "created_at": "2025-10-02T02:55:05.467788Z", "done": false, "done_reason": null, "total_duration": null, @@ -1048,7 +1048,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:22.476827205Z", + "created_at": "2025-10-02T02:55:05.508102Z", "done": false, "done_reason": null, "total_duration": null, @@ -1066,7 +1066,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:22.663529325Z", + "created_at": "2025-10-02T02:55:05.548521Z", "done": false, "done_reason": null, "total_duration": null, @@ -1084,7 +1084,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:22.851128482Z", + "created_at": "2025-10-02T02:55:05.588742Z", "done": false, "done_reason": null, "total_duration": null, @@ -1102,7 +1102,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:23.042424694Z", + "created_at": "2025-10-02T02:55:05.629266Z", "done": false, "done_reason": null, "total_duration": null, @@ -1120,7 +1120,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:23.234415016Z", + "created_at": "2025-10-02T02:55:05.674214Z", "done": false, "done_reason": null, "total_duration": null, @@ -1138,7 +1138,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:23.422767727Z", + "created_at": "2025-10-02T02:55:05.71804Z", "done": false, "done_reason": null, "total_duration": null, @@ -1156,7 +1156,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:23.611953916Z", + "created_at": 
"2025-10-02T02:55:05.761666Z", "done": false, "done_reason": null, "total_duration": null, @@ -1174,7 +1174,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:23.802138602Z", + "created_at": "2025-10-02T02:55:05.80432Z", "done": false, "done_reason": null, "total_duration": null, @@ -1192,7 +1192,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:23.993446989Z", + "created_at": "2025-10-02T02:55:05.846217Z", "done": false, "done_reason": null, "total_duration": null, @@ -1210,7 +1210,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:24.186705934Z", + "created_at": "2025-10-02T02:55:05.88931Z", "done": false, "done_reason": null, "total_duration": null, @@ -1228,7 +1228,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:24.39236955Z", + "created_at": "2025-10-02T02:55:05.93282Z", "done": false, "done_reason": null, "total_duration": null, @@ -1246,7 +1246,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:24.579916625Z", + "created_at": "2025-10-02T02:55:05.976513Z", "done": false, "done_reason": null, "total_duration": null, @@ -1264,7 +1264,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:24.768821839Z", + "created_at": "2025-10-02T02:55:06.020886Z", "done": false, "done_reason": null, "total_duration": null, @@ -1282,7 +1282,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:24.957792215Z", + "created_at": "2025-10-02T02:55:06.063597Z", "done": false, "done_reason": 
null, "total_duration": null, @@ -1300,7 +1300,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:25.147895529Z", + "created_at": "2025-10-02T02:55:06.106054Z", "done": false, "done_reason": null, "total_duration": null, @@ -1318,7 +1318,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:25.337348777Z", + "created_at": "2025-10-02T02:55:06.148232Z", "done": false, "done_reason": null, "total_duration": null, @@ -1336,7 +1336,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:25.528043056Z", + "created_at": "2025-10-02T02:55:06.190334Z", "done": false, "done_reason": null, "total_duration": null, @@ -1354,7 +1354,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:25.720598024Z", + "created_at": "2025-10-02T02:55:06.231933Z", "done": false, "done_reason": null, "total_duration": null, @@ -1372,7 +1372,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:25.908813849Z", + "created_at": "2025-10-02T02:55:06.27373Z", "done": false, "done_reason": null, "total_duration": null, @@ -1390,7 +1390,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:26.102538985Z", + "created_at": "2025-10-02T02:55:06.315435Z", "done": false, "done_reason": null, "total_duration": null, @@ -1408,7 +1408,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:26.296587284Z", + "created_at": "2025-10-02T02:55:06.35848Z", "done": false, "done_reason": null, "total_duration": null, @@ -1426,7 +1426,7 @@ 
"__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:26.48997969Z", + "created_at": "2025-10-02T02:55:06.400959Z", "done": false, "done_reason": null, "total_duration": null, @@ -1444,7 +1444,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:26.68461717Z", + "created_at": "2025-10-02T02:55:06.441214Z", "done": false, "done_reason": null, "total_duration": null, @@ -1462,7 +1462,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:26.877976002Z", + "created_at": "2025-10-02T02:55:06.481409Z", "done": false, "done_reason": null, "total_duration": null, @@ -1480,7 +1480,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:27.071304424Z", + "created_at": "2025-10-02T02:55:06.522518Z", "done": false, "done_reason": null, "total_duration": null, @@ -1498,7 +1498,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:27.267083009Z", + "created_at": "2025-10-02T02:55:06.564666Z", "done": false, "done_reason": null, "total_duration": null, @@ -1516,7 +1516,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:27.458752902Z", + "created_at": "2025-10-02T02:55:06.605895Z", "done": false, "done_reason": null, "total_duration": null, @@ -1534,7 +1534,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:27.651757232Z", + "created_at": "2025-10-02T02:55:06.646978Z", "done": false, "done_reason": null, "total_duration": null, @@ -1552,7 +1552,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { 
"model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:27.84093711Z", + "created_at": "2025-10-02T02:55:06.68904Z", "done": false, "done_reason": null, "total_duration": null, @@ -1570,7 +1570,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:28.031166547Z", + "created_at": "2025-10-02T02:55:06.730173Z", "done": false, "done_reason": null, "total_duration": null, @@ -1588,7 +1588,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:28.222014814Z", + "created_at": "2025-10-02T02:55:06.772861Z", "done": false, "done_reason": null, "total_duration": null, @@ -1606,7 +1606,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:28.412024854Z", + "created_at": "2025-10-02T02:55:06.816599Z", "done": false, "done_reason": null, "total_duration": null, @@ -1624,7 +1624,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:28.603242201Z", + "created_at": "2025-10-02T02:55:06.859503Z", "done": false, "done_reason": null, "total_duration": null, @@ -1642,7 +1642,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:28.793015428Z", + "created_at": "2025-10-02T02:55:06.901146Z", "done": false, "done_reason": null, "total_duration": null, @@ -1660,7 +1660,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:28.98105341Z", + "created_at": "2025-10-02T02:55:06.943698Z", "done": false, "done_reason": null, "total_duration": null, @@ -1678,7 +1678,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": 
"2025-10-01T01:35:29.171562052Z", + "created_at": "2025-10-02T02:55:06.985619Z", "done": false, "done_reason": null, "total_duration": null, @@ -1696,7 +1696,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:29.359960218Z", + "created_at": "2025-10-02T02:55:07.027092Z", "done": false, "done_reason": null, "total_duration": null, @@ -1714,7 +1714,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:29.547663965Z", + "created_at": "2025-10-02T02:55:07.068654Z", "done": false, "done_reason": null, "total_duration": null, @@ -1732,7 +1732,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:29.737967784Z", + "created_at": "2025-10-02T02:55:07.109785Z", "done": false, "done_reason": null, "total_duration": null, @@ -1750,7 +1750,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:29.926196503Z", + "created_at": "2025-10-02T02:55:07.151491Z", "done": false, "done_reason": null, "total_duration": null, @@ -1768,7 +1768,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:30.117904197Z", + "created_at": "2025-10-02T02:55:07.192762Z", "done": false, "done_reason": null, "total_duration": null, @@ -1786,7 +1786,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:30.309146475Z", + "created_at": "2025-10-02T02:55:07.2337Z", "done": false, "done_reason": null, "total_duration": null, @@ -1804,15 +1804,15 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:35:30.497677975Z", + "created_at": 
"2025-10-02T02:55:07.276074Z", "done": true, "done_reason": "stop", - "total_duration": 21228194411, - "load_duration": 46730034, + "total_duration": 4260353875, + "load_duration": 95584041, "prompt_eval_count": 36, - "prompt_eval_duration": 2125755306, + "prompt_eval_duration": 62641958, "eval_count": 100, - "eval_duration": 19055134812, + "eval_duration": 4101499250, "response": "", "thinking": null, "context": null diff --git a/tests/integration/recordings/responses/325a72db5755.json b/tests/integration/recordings/responses/325a72db5755.json index ca3eea2f3..1341efc51 100644 --- a/tests/integration/recordings/responses/325a72db5755.json +++ b/tests/integration/recordings/responses/325a72db5755.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,7 +73,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -99,7 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { 
@@ -114,7 +114,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -125,7 +125,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -140,7 +140,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -151,7 +151,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -166,7 +166,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -177,7 +177,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -192,7 +192,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -203,7 +203,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -218,7 +218,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -229,7 +229,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -244,7 +244,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": 
"chat.completion.chunk", "service_tier": null, @@ -255,7 +255,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -270,7 +270,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -281,7 +281,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -296,7 +296,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -307,7 +307,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -322,7 +322,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -333,7 +333,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -348,7 +348,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -359,7 +359,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -374,7 +374,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -385,7 +385,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -400,7 +400,7 @@ "logprobs": null } ], - "created": 1756921364, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -411,7 +411,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -426,7 +426,7 @@ "logprobs": null } ], - "created": 1756921365, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -437,7 +437,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -452,7 +452,7 @@ "logprobs": null } ], - "created": 1756921365, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -463,7 +463,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -478,7 +478,7 @@ "logprobs": null } ], - "created": 1756921365, + "created": 1759437883, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -489,7 +489,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -504,7 +504,7 @@ "logprobs": null } ], - "created": 1756921365, + "created": 1759437884, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -515,683 +515,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - 
"content": " It", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": "'s", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " federally", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " owned", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " district", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " that", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " serves", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - 
"service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " as", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " seat", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " of", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 
1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " federal", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " government", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": "assistant", - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " housing", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " many", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " national", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - 
"delta": { - "content": " landmarks", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921365, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " institutions", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921366, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921366, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921366, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": " offices", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921366, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921366, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-735", "choices": [ { "delta": { @@ -1206,7 +530,7 @@ "logprobs": null } ], - "created": 1756921366, + "created": 1759437884, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/3387f56ccac9.json 
b/tests/integration/recordings/responses/3387f56ccac9.json index 9b8ba7d4e..14891a91b 100644 --- a/tests/integration/recordings/responses/3387f56ccac9.json +++ b/tests/integration/recordings/responses/3387f56ccac9.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-200", + "id": "chatcmpl-141", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759368386, + "created": 1759441670, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/35a5f1de4bd7.json b/tests/integration/recordings/responses/35a5f1de4bd7.json new file mode 100644 index 000000000..960cb2d4e --- /dev/null +++ b/tests/integration/recordings/responses/35a5f1de4bd7.json @@ -0,0 +1,809 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_tipirynt", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_tipirynt", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " was", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + 
"logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " unable", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " find", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": 
null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429355, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": 
"chatcmpl-932", + "choices": [ + { + "delta": { + "content": " liquid", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " Celsius", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " could", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " not", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " be", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " located", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " my", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": " database", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-932", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759429356, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/36badd90238f.json 
b/tests/integration/recordings/responses/36badd90238f.json new file mode 100644 index 000000000..c3760805b --- /dev/null +++ b/tests/integration/recordings/responses/36badd90238f.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.266524Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.307779Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + 
"response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.349588Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.392007Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.435225Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.47687Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.518854Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": 
null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.560093Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.601376Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.642613Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.686473Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-02T02:55:11.728965Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.770498Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.812614Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.854407Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.896933Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + 
"__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.938059Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:11.980332Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.021812Z", + "done": true, + "done_reason": "stop", + "total_duration": 900445208, + "load_duration": 78206917, + "prompt_eval_count": 364, + "prompt_eval_duration": 65645917, + "eval_count": 19, + "eval_duration": 755986375, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/37706c1729ba.json b/tests/integration/recordings/responses/37706c1729ba.json index 74caaadf1..7bb9784f5 100644 --- a/tests/integration/recordings/responses/37706c1729ba.json +++ b/tests/integration/recordings/responses/37706c1729ba.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-923", + "id": "chatcmpl-905", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282470, + "created": 1759441160, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git 
a/tests/integration/recordings/responses/378412143edb.json b/tests/integration/recordings/responses/378412143edb.json new file mode 100644 index 000000000..bbd3517d5 --- /dev/null +++ b/tests/integration/recordings/responses/378412143edb.json @@ -0,0 +1,419 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_ay3w6qne", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_ay3w6qne", + "content": "-100" + } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, 
+ "index": 0, + "logprobs": null + } + ], + "created": 1759428020, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428020, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428020, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428020, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": 
null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": 
"chatcmpl-250", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-250", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759428021, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/38ea441b5f83.json b/tests/integration/recordings/responses/38ea441b5f83.json index 79886b389..03229846b 100644 --- a/tests/integration/recordings/responses/38ea441b5f83.json +++ b/tests/integration/recordings/responses/38ea441b5f83.json @@ -46,7 +46,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-761", + "id": "chatcmpl-236", "choices": [ { "finish_reason": "tool_calls", @@ -61,7 +61,7 @@ "function_call": null, "tool_calls": [ { - "id": "call_cj8ownwc", + "id": "call_u4ydewqv", "function": { "arguments": "{\"location\":\"San Francisco, CA\"}", "name": "get_weather" @@ -73,15 +73,15 @@ } } ], - "created": 1758975113, + "created": 1759376610, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 18, + "completion_tokens": 20, "prompt_tokens": 185, - "total_tokens": 203, + "total_tokens": 205, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/3a4fb206e68a.json b/tests/integration/recordings/responses/3a4fb206e68a.json new file mode 100644 index 000000000..6b180d892 --- /dev/null +++ b/tests/integration/recordings/responses/3a4fb206e68a.json @@ -0,0 +1,986 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + 
"headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant Always respond with tool calls no matter what. " + }, + { + "role": "user", + "content": "Get the boiling point of polyjuice with a tool call." + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_l2ovyvtm", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_l2ovyvtm", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + 
"__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " apologize", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " for", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " error", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": 
null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " Here", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " revised", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429343, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " tool", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " call", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": ":\n\n", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + 
"logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "{\"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "name", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + 
"role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "get", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "_bo", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "iling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + 
"choices": [ + { + "delta": { + "content": "_point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "\",", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "parameters", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " {\"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "liquid", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "_name", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "\"}}", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-329", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759429344, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/3a81146f2afa.json b/tests/integration/recordings/responses/3a81146f2afa.json index e2d2d52d6..237cc27fe 100644 --- a/tests/integration/recordings/responses/3a81146f2afa.json 
+++ b/tests/integration/recordings/responses/3a81146f2afa.json @@ -18,7 +18,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -27,7 +27,7 @@ "text": "Blue" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -37,7 +37,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -46,7 +46,7 @@ "text": ".\n\n" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -56,7 +56,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -65,7 +65,7 @@ "text": "The" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -75,16 +75,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " completed" + "text": " classic" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -94,16 +94,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " sentence" + "text": " rh" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -113,7 +113,83 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": 
"cmpl-439", + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "ym" + } + ], + "created": 1759437793, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "ing" + } + ], + "created": 1759437793, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " couple" + } + ], + "created": 1759437793, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "t" + } + ], + "created": 1759437793, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -122,7 +198,7 @@ "text": " is" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -132,7 +208,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -141,7 +217,7 @@ "text": " a" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", 
"system_fingerprint": "fp_ollama", @@ -151,7 +227,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -160,7 +236,7 @@ "text": " well" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -170,7 +246,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -179,7 +255,7 @@ "text": "-known" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -189,7 +265,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -198,7 +274,7 @@ "text": " phrase" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -208,16 +284,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " from" + "text": " that" } ], - "created": 1757857132, + "created": 1759437793, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -227,16 +303,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " a" + "text": " completes" } ], - "created": 1757857132, + "created": 1759437794, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -246,653 +322,7 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", - 
"choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " traditional" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " English" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " poem" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": ":\n\n" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "\"" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "R" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - 
"system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "oses" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " are" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " red" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "," - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " v" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - 
"text": "io" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "lets" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " are" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " blue" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": ",\n" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "Sugar" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": 
"openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " is" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " sweet" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "," - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " and" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " so" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " are" - } - ], - "created": 1757857132, - "model": 
"llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " you" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": ".\"" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " However" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "," - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " in" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - 
{ - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " many" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " variations" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " of" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " this" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": " poem" - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", - "choices": [ - { - "finish_reason": null, - "index": 0, - "logprobs": null, - "text": "," - } - ], - "created": 1757857132, - "model": "llama3.2:3b-instruct-fp16", - "object": "text_completion", - "system_fingerprint": "fp_ollama", - 
"usage": null - } - }, - { - "__type__": "openai.types.completion.Completion", - "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -901,7 +331,7 @@ "text": " the" } ], - "created": 1757857132, + "created": 1759437794, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -911,16 +341,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": " line" + "text": " poem" } ], - "created": 1757857132, + "created": 1759437794, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -930,7 +360,64 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " with" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " the" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " word" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", "choices": [ { "finish_reason": null, @@ -939,7 +426,7 
@@ "text": " \"" } ], - "created": 1757857132, + "created": 1759437794, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -949,16 +436,16 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", "choices": [ { "finish_reason": null, "index": 0, "logprobs": null, - "text": "vio" + "text": "blue" } ], - "created": 1757857132, + "created": 1759437794, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", @@ -968,7 +455,520 @@ { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-439", + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "\"," + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " creating" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " a" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " rhyme" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + 
"usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " scheme" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " of" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " AABB" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "." 
+ } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " This" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " poetic" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " device" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " has" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " been" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " used" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " in" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " various" + } + ], + "created": 1759437794, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " forms" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " and" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " iterations" + } + ], + "created": 1759437795, + "model": 
"llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " throughout" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " history" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": "," + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " often" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " to" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + 
"choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " convey" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " love" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", + "choices": [ + { + "finish_reason": null, + "index": 0, + "logprobs": null, + "text": " and" + } + ], + "created": 1759437795, + "model": "llama3.2:3b-instruct-fp16", + "object": "text_completion", + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.completion.Completion", + "__data__": { + "id": "cmpl-676", "choices": [ { "finish_reason": "length", @@ -977,7 +977,7 @@ "text": "" } ], - "created": 1757857132, + "created": 1759437795, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", diff --git a/tests/integration/recordings/responses/3bd4bb58d78a.json b/tests/integration/recordings/responses/3bd4bb58d78a.json new file mode 100644 index 000000000..ba44a8e3b --- /dev/null +++ b/tests/integration/recordings/responses/3bd4bb58d78a.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "str", + "description": "The name of the liquid" + }, + "celcius": { + "type": "bool", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-288", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_rp5mke0x", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759425751, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-288", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759425751, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/3ca695048bee.json b/tests/integration/recordings/responses/3ca695048bee.json index b307b2f98..45ca41d28 100644 --- a/tests/integration/recordings/responses/3ca695048bee.json +++ b/tests/integration/recordings/responses/3ca695048bee.json @@ -39,32 +39,22 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-3", + "id": "chatcmpl-828", "choices": [ { "delta": { - "content": "", + "content": "{\"name\":\"get_water\", \"parameters\": {\"city\":\"Tokyo\"}}", "function_call": null, "refusal": null, "role": "assistant", - "tool_calls": [ - { - "index": 0, - "id": "call_3kigugt3", - "function": { - "arguments": "{\"city\":\"Tokyo\"}", - "name": "get_weather" - }, - "type": "function" - } - ] + "tool_calls": null }, "finish_reason": null, "index": 0, "logprobs": null } ], - "created": 1756921361, + "created": 1759437882, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -75,7 +65,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-3", + "id": "chatcmpl-828", "choices": [ { "delta": { @@ -85,12 +75,12 @@ "role": "assistant", "tool_calls": null }, - "finish_reason": "tool_calls", + "finish_reason": "stop", "index": 0, "logprobs": null } ], - "created": 1756921361, + "created": 1759437882, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/3f5871e0805d.json b/tests/integration/recordings/responses/3f5871e0805d.json new file mode 100644 index 000000000..4c79ce460 --- /dev/null +++ b/tests/integration/recordings/responses/3f5871e0805d.json @@ -0,0 +1,85 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "Process this 
data" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "process_data", + "description": "Process structured data", + "parameters": { + "type": "object", + "properties": { + "data": { + "$ref": "#/$defs/DataObject" + } + }, + "$defs": { + "DataObject": { + "type": "object", + "properties": { + "values": { + "type": "array", + "items": { + "type": "number" + } + } + } + } + } + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-798", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "{\"name\":\"process_data\",\"parameters\":{\"data\":[{\"values\":[2,3]}]\"}}", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1759376608, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 20, + "prompt_tokens": 176, + "total_tokens": 196, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/3fc7de7e822b.json b/tests/integration/recordings/responses/3fc7de7e822b.json new file mode 100644 index 000000000..bf97c4158 --- /dev/null +++ b/tests/integration/recordings/responses/3fc7de7e822b.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?" 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "str", + "description": "The name of the liquid" + }, + "celcius": { + "type": "bool", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-54", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_xbvaryhe", + "function": { + "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759425232, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-54", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759425232, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/41ac2702de6c.json b/tests/integration/recordings/responses/41ac2702de6c.json index 987f16ae1..92c1fc0cd 100644 --- a/tests/integration/recordings/responses/41ac2702de6c.json +++ b/tests/integration/recordings/responses/41ac2702de6c.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-402", + "id": "chatcmpl-682", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245123, + "created": 1759437798, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/4283d7199d9b.json b/tests/integration/recordings/responses/4283d7199d9b.json new file mode 100644 index 000000000..c09104a8c --- /dev/null +++ b/tests/integration/recordings/responses/4283d7199d9b.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.080011Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.126544Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": 
"get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.169848Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.21147Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.254674Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.29727Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.338937Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + 
"prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.380865Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.422627Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.463935Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.505674Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-02T02:54:54.547072Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.588461Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.629627Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.67101Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.713398Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + 
"__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.757208Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.800572Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:54.843458Z", + "done": true, + "done_reason": "stop", + "total_duration": 1585956083, + "load_duration": 162121750, + "prompt_eval_count": 361, + "prompt_eval_duration": 657951625, + "eval_count": 19, + "eval_duration": 765105333, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/4a32ce3da3ce.json b/tests/integration/recordings/responses/4a32ce3da3ce.json new file mode 100644 index 000000000..565edee20 --- /dev/null +++ b/tests/integration/recordings/responses/4a32ce3da3ce.json @@ -0,0 +1,414 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant Always respond with tool calls no matter what. " + }, + { + "role": "user", + "content": "Get the boiling point of polyjuice with a tool call." 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_v7gdtg8p", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_v7gdtg8p", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, 
+ "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441160, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-67", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759441161, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/4c651211b0e0.json b/tests/integration/recordings/responses/4c651211b0e0.json index 
dbed465cf..94ba43163 100644 --- a/tests/integration/recordings/responses/4c651211b0e0.json +++ b/tests/integration/recordings/responses/4c651211b0e0.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-796", + "id": "chatcmpl-216", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759368388, + "created": 1759441674, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/4ebcaf6c2aee.json b/tests/integration/recordings/responses/4ebcaf6c2aee.json index 41dc9ab1a..f57994797 100644 --- a/tests/integration/recordings/responses/4ebcaf6c2aee.json +++ b/tests/integration/recordings/responses/4ebcaf6c2aee.json @@ -19,22 +19,390 @@ "data": [ { "embedding": [ - 0.253706, - 0.016367152, - -0.29664654, - 0.31654558, - -0.18624601, - 0.07602756, - -0.031531323, - 0.2986085, - -0.49672848, - -0.36617878, - 0.25328273, - -0.33349335, - 0.0060151755, - 0.14081024, - -0.13757885, - -0.14679416 + 0.04635219, + 0.002988263, + -0.054220885, + 0.057812735, + -0.0340614, + 0.013923248, + -0.005755826, + 0.054555666, + -0.09073176, + -0.066910096, + 0.046287432, + -0.060912322, + 0.0010950539, + 0.025724398, + -0.025169374, + -0.026821515, + -0.030190151, + 0.0019341545, + -0.0754819, + 0.057380512, + 0.020332545, + -0.005591279, + -0.0022273492, + 0.012063173, + -0.011033521, + -0.03300947, + 0.05462081, + 0.014426073, + 0.024025004, + 0.004224287, + 0.09837723, + 0.08385713, + -0.049175426, + 0.03877149, + 0.08748876, + -0.0223024, + 0.006552746, + -0.0070359865, + 0.017893821, + 0.015465863, + 0.05007282, + -0.019349905, + 0.064887345, + 0.03184605, + 0.0034936152, + 0.02317752, + -0.06297051, + 0.044468515, + -0.022246253, + -0.017976552, + 0.040390052, + -0.0020998395, + -0.05173264, + 0.014722753, + 0.01640469, + -0.06438627, + -0.043313596, + -0.040564552, + 0.044412937, + -0.0031199565, + -0.007237415, 
+ -0.05158015, + 0.059660934, + -0.014839656, + 0.012902056, + 0.028181136, + -0.019578207, + -0.0664231, + -0.06333673, + 0.028995825, + -0.114707075, + 0.041575413, + -0.022128351, + 0.01979776, + 0.0630018, + 0.011822141, + -0.06492722, + -0.066328146, + 0.021114407, + -0.020638306, + -0.009599678, + 0.013701863, + -0.060742326, + 0.005395315, + 0.026589092, + 0.11719033, + 0.067120634, + 0.008300158, + 0.036319703, + 0.00772981, + 0.071582936, + 0.019818509, + -0.15945566, + 0.047943458, + 0.00031571978, + -0.04666597, + 0.007148715, + -0.08839544, + 0.038042437, + 0.06620088, + 0.034336157, + -0.035366412, + 0.041598067, + 0.073756054, + -0.018818064, + -0.017260034, + 0.058635473, + -0.01371376, + 0.048319146, + -0.023727186, + 0.024134034, + 0.015763162, + 0.06681245, + 0.01748244, + 0.0825409, + -0.044568237, + 0.0015441044, + -0.011225885, + 0.0153481, + -0.061364066, + 0.05792184, + 0.044216745, + -0.047036964, + -0.02634555, + -0.033504363, + 0.06713578, + 0.030866034, + 2.024336e-34, + -0.03532978, + 0.021929236, + 0.030160688, + 0.09271786, + -0.010355268, + 0.07196569, + 0.052604284, + 0.085753724, + 0.094942175, + 0.053786535, + -0.08900509, + -0.024382822, + -0.008744401, + -0.03167582, + 0.01025236, + 0.1818434, + -0.0022662894, + 0.118558116, + -0.072208576, + -0.005867667, + 0.0746222, + -0.024001855, + -0.013938801, + -0.030681474, + -0.029207803, + -0.117624186, + -0.046466038, + -0.002622228, + -0.0902171, + -0.038626853, + -0.037497964, + -0.02418436, + -0.069297835, + 0.06424038, + 0.0045628003, + -0.0041498984, + -0.01649947, + 0.051125433, + -0.0058985935, + -0.0122523345, + -0.047424458, + -0.007806876, + 0.07906618, + 0.03244041, + -0.044682544, + -0.022625683, + 0.028852794, + -0.050480433, + 0.043801326, + -0.023512814, + -0.029832385, + 0.031089257, + 0.07129686, + -0.089649536, + 0.011963804, + -0.018448317, + 0.019637493, + 0.020081993, + 0.0012980831, + 0.093201645, + -0.064436235, + -0.040581323, + -0.01193043, + 0.043884862, + 
-0.010675756, + -0.030739127, + 0.005605308, + -0.110498495, + 0.044510514, + 0.037110664, + 0.04116233, + -0.039460793, + -0.04470639, + -0.027589805, + -0.02073358, + -0.067221105, + 0.050390884, + 0.031397663, + -0.008031462, + -0.009285899, + 0.0013141648, + -0.017254544, + 0.010367782, + -0.05940024, + -0.018042587, + -0.15487815, + 0.0069424273, + -0.05208202, + 0.0014201442, + -0.13956298, + -0.040203292, + 0.027910054, + -0.064872995, + -0.016270144, + 0.07052549, + 5.3188943e-34, + 0.012666737, + 0.016728623, + -0.013163009, + 0.06391275, + -0.043404065, + 0.015435096, + 0.03720438, + 0.05997576, + -0.07789181, + -0.0408386, + 0.024137221, + -0.019834999, + -0.034739267, + 0.00042199617, + 0.048484907, + 0.08716056, + -0.101133205, + -0.07535088, + -0.03912376, + -0.031597532, + -0.052266575, + 0.022085808, + -0.011040282, + 0.005077135, + -0.088432744, + -0.010477913, + 0.047780182, + -0.073345095, + 0.014382301, + 0.038075384, + 0.02176859, + -0.029071847, + -0.036925532, + 0.14317243, + 0.020646103, + -0.08367964, + 0.111576855, + -0.009943396, + 0.023071144, + 0.0926832, + 0.011242715, + 0.068017475, + -0.007714686, + 0.03060742, + -0.011360289, + 0.109015204, + 0.12930514, + -0.07566831, + 0.09001269, + -0.0090979, + 0.0148039665, + 0.048663232, + 0.08894293, + 0.038565516, + 0.005821986, + 0.016084671, + -0.106283545, + -0.033372246, + 0.05440088, + -0.005663873, + 0.0011572369, + -0.024969472, + 0.043092247, + -0.009314855, + -0.11836073, + -0.027310666, + 0.009811885, + -0.0052975323, + -0.044883158, + 0.066436425, + -0.06750139, + -0.02696421, + 0.01402391, + -0.04950559, + -0.084093384, + -0.07380851, + 0.04709705, + 4.9404687e-05, + 0.01672617, + 0.01849747, + 0.027683195, + 0.0047972985, + 0.0017495222, + 0.07066204, + -0.022430636, + 0.06875498, + 0.093927115, + 0.11101308, + -0.015589739, + 0.021178465, + 0.033638563, + 0.034676168, + -0.026882911, + -0.010514364, + 0.0073013064, + -1.2070348e-08, + -0.10034882, + -0.028641108, + 
-0.061462097, + -0.009792086, + -0.081652306, + -0.011814046, + 0.002039501, + 0.010384326, + 0.01639641, + 0.09542911, + 0.012538498, + -0.03542602, + 0.018125113, + 0.062750235, + 0.0007333235, + -0.13612862, + -0.049830034, + 0.021177148, + 0.006589976, + 0.007859552, + -0.03270378, + 0.024738451, + -0.02542262, + -0.0033008803, + 0.030640591, + -0.032442387, + 0.04598555, + 0.03903257, + 0.035755396, + 0.01686084, + 0.13498692, + 0.028296864, + -0.0035224769, + -0.036735818, + -0.046355885, + 0.057701495, + 0.008000554, + 0.047822826, + 0.04911064, + 0.035214324, + -0.09817153, + 0.0050856513, + -0.018094635, + -0.04385158, + 0.06649695, + -0.037648164, + -0.006218895, + -0.037976924, + -0.0036204353, + -0.03149386, + 0.031777944, + -0.011333557, + 0.009081317, + 0.022486951, + 0.032106593, + 0.023041077, + -0.06739943, + 0.06294171, + -0.057333894, + -0.041295, + 0.060841344, + 0.03247397, + -0.05132725, + -0.04992364 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/4f00cf740aba.json b/tests/integration/recordings/responses/4f00cf740aba.json index 85a5e18fb..fb05db569 100644 --- a/tests/integration/recordings/responses/4f00cf740aba.json +++ b/tests/integration/recordings/responses/4f00cf740aba.json @@ -18,390 +18,390 @@ "data": [ { "embedding": [ - -0.038157914, - 0.03290493, - -0.0055371798, - 0.014353213, - -0.040209096, - -0.11667767, - 0.03170551, - 0.0019347348, - -0.04254092, - 0.029190615, - 0.042559944, - 0.032130145, - 0.02983921, - 0.010979105, - -0.053759154, - -0.05030495, - -0.023470305, - 0.010730486, - -0.1377361, - 0.0039985846, - 0.029267203, - 0.066698566, - -0.015405643, - 0.04843479, - -0.0881545, - -0.012694429, - 0.041265942, - 0.04089442, - -0.05000745, - -0.05805947, - 0.048748765, - 0.06891688, - 0.058812816, - 0.008785837, - -0.016080279, - 0.08517403, - -0.07814158, - -0.077435054, - 0.020808736, - 0.016186161, - 0.032549612, - -0.05344129, - -0.062166847, - -0.0242584, - 0.007393759, - 
0.024064584, - 0.0064619263, - 0.051204458, - 0.072843835, - 0.034658417, - -0.05477693, - -0.05941287, - -0.007262739, - 0.020149412, - 0.035835978, - 0.0056162532, - 0.010803632, - -0.052724347, - 0.010110615, - -0.0087345, - -0.06285489, - 0.038390912, - -0.013975588, - 0.0734118, - 0.090072334, - -0.07995426, - -0.016420014, - 0.044813525, - -0.06888206, - -0.033037275, - -0.015467736, - 0.01130628, - 0.036483694, - 0.0663459, - -0.054344203, - 0.008723171, - 0.012078509, - -0.038129516, - 0.006938081, - 0.051155496, - 0.07745829, - -0.122897476, - 0.01635594, - 0.04956378, - 0.031677794, - -0.03963372, - 0.0016560612, - 0.0095810415, - -0.032620687, - -0.03396473, - -0.13327733, - 0.0072318353, - -0.010225149, - 0.038535405, - -0.09343492, - -0.04173385, - 0.06996305, - -0.026312327, - -0.14973918, - 0.13443227, - 0.03750676, - 0.052842483, - 0.045053005, - 0.018721534, - 0.05443072, - 0.017290117, - -0.03255681, - 0.046160772, - -0.046711024, - -0.030576464, - -0.018258592, - -0.048711784, - 0.033041865, - -0.003856249, - 0.05003307, - -0.05821012, - -0.00994153, - 0.0106995255, - -0.04008794, - -0.0015539092, - 0.060838487, - -0.04559896, - 0.04924722, - 0.026119638, - 0.019796783, - -0.0016312932, - 0.05955464, - -6.527786e-33, - 0.063555494, - 0.003072545, - 0.0290068, - 0.17338625, - 0.0029474646, - 0.027745575, - -0.095103905, - -0.031165987, - 0.026719859, - -0.010799976, - 0.023851028, - 0.02375357, - -0.031152952, - 0.049497593, - -0.025005657, - 0.10176666, - -0.079190366, - -0.0032479328, - 0.042849813, - 0.09489888, - -0.066508934, - 0.00632239, - 0.022188535, - 0.06996212, - -0.007491268, - -0.001777037, - 0.027047161, - -0.07536194, - 0.11401931, - 0.008564227, - -0.02371391, - -0.046974454, - 0.0144310715, - 0.019899534, - -0.0046927175, - 0.0013119543, - -0.03432107, - -0.054212432, - -0.09418897, - -0.028963951, - -0.018907014, - 0.045735538, - 0.04757043, - -0.003132595, - -0.033231355, - -0.013520351, - 0.051010653, - 0.03111525, - 
0.015257217, - 0.054166727, - -0.085080594, - 0.013355202, - -0.04763934, - 0.07099156, - -0.01309272, - -0.0023823304, - 0.050339438, - -0.041624993, - -0.014171974, - 0.032421313, - 0.005414455, - 0.09128853, - 0.0045168963, - -0.018196244, - -0.015225792, - -0.04635148, - 0.038764603, - 0.014739169, - 0.052030377, - 0.0017809072, - -0.014930553, - 0.027100598, - 0.031190928, - 0.02379928, - -0.0045879, - 0.03622444, - 0.066800386, - -0.0018508516, - 0.021243243, - -0.0575494, - 0.019077979, - 0.031474162, - -0.018456634, - -0.04083116, - 0.10387791, - 0.011981423, - -0.014923204, - -0.10519511, - -0.012293124, - -0.00042049217, - -0.09506704, - 0.058275525, - 0.042611193, - -0.025061507, - -0.094545335, - 4.010606e-33, - 0.13226718, - 0.0053517097, - -0.03314567, - -0.09099676, - -0.031551942, - -0.033939674, - -0.071981214, - 0.12595285, - -0.08333936, - 0.052855294, - 0.001036374, - 0.021973396, - 0.104020424, - 0.013031712, - 0.040921222, - 0.018695012, - 0.114233166, - 0.024822846, - 0.014595918, - 0.00621894, - -0.011220824, - -0.035742316, - -0.03801776, - 0.011226576, - -0.051305167, - 0.007892534, - 0.06734842, - 0.0033567564, - -0.09286571, - 0.03701943, - -0.022331072, - 0.040051647, - -0.030764744, - -0.011390678, - -0.014426033, - 0.024999708, - -0.09751172, - -0.03538673, - -0.03757043, - -0.010174254, - -0.06396341, - 0.025548752, - 0.020661479, - 0.03752242, - -0.10438308, - -0.028266912, - -0.052153755, - 0.012830027, - -0.05125152, - -0.029009243, - -0.09633578, - -0.042322997, - 0.06716196, - -0.030903742, - -0.010314011, - 0.027343867, - -0.028119028, - 0.010296558, - 0.043072425, - 0.022286164, - 0.007943, - 0.056093868, - 0.040728126, - 0.09295372, - 0.016456816, - -0.053744446, - 0.00047035623, - 0.050744157, - 0.04246857, - -0.029237023, - 0.009294763, - -0.010624897, - -0.037202932, - 0.00220195, - -0.030278567, - 0.07457478, - 0.0026277148, - -0.017591486, - 0.0028708735, - 0.03840644, - 0.0072204536, - 0.045653794, - 0.039947055, - 
0.014161398, - -0.014247232, - 0.058465447, - 0.036360227, - 0.055268615, - -0.02004829, - -0.08043532, - -0.030213723, - -0.0148566915, - 0.022293866, - 0.011908896, - -0.06907556, - -1.8805048e-08, - -0.078408636, - 0.046699222, - -0.023894435, - 0.06347232, - 0.02395583, - 0.0014103559, - -0.090737104, - -0.06684135, - -0.080118775, - 0.0054891296, - 0.05368204, - 0.10478211, - -0.066875115, - 0.015525915, - 0.06710851, - 0.07083251, - -0.03199485, - 0.020825442, - -0.021920865, - -0.0072890157, - -0.01058703, - 0.004174248, - 0.033155944, - -0.07901077, - 0.038750935, - -0.07521113, - -0.015731987, - 0.005987591, - 0.0051212795, - -0.061557226, - 0.04203319, - 0.09544439, - -0.04317485, - 0.014446859, - -0.10614051, - -0.028011814, - 0.01101727, - 0.069552526, - 0.0669063, - -0.0747214, - -0.078444764, - 0.042728573, - -0.034634914, - -0.106056124, - -0.0357495, - 0.05155015, - 0.068699375, - -0.049968246, - 0.015420614, - -0.06460179, - -0.07601102, - 0.026022797, - 0.07440251, - -0.0124161495, - 0.1332999, - 0.07480527, - 0.051343314, - 0.02094546, - -0.026808253, - 0.08892536, - 0.03996125, - -0.041000355, - 0.03187991, - 0.018108707 + -0.038168654, + 0.032873917, + -0.0055947267, + 0.014366432, + -0.040310103, + -0.116643615, + 0.031721067, + 0.0019260457, + -0.04255802, + 0.029198613, + 0.04252229, + 0.032184314, + 0.029838374, + 0.010959321, + -0.053805783, + -0.05028783, + -0.023449864, + 0.0107550435, + -0.13774979, + 0.0039929547, + 0.029302042, + 0.066712305, + -0.015410682, + 0.048422653, + -0.08814465, + -0.012715775, + 0.041334823, + 0.040851083, + -0.050064698, + -0.05804616, + 0.048728727, + 0.06888658, + 0.058795262, + 0.008804153, + -0.016073612, + 0.08514259, + -0.078146815, + -0.07741974, + 0.020842256, + 0.016201088, + 0.032518543, + -0.05346469, + -0.062197812, + -0.024271712, + 0.007416788, + 0.024103774, + 0.006469804, + 0.051166162, + 0.07284196, + 0.034627657, + -0.05475476, + -0.059386417, + -0.0071934434, + 0.020163197, + 0.035816014, 
+ 0.0055927313, + 0.010762318, + -0.05274177, + 0.010083032, + -0.008742163, + -0.06284565, + 0.038426206, + -0.013933317, + 0.07342759, + 0.09004579, + -0.07995627, + -0.016420787, + 0.044767782, + -0.06886435, + -0.03303916, + -0.015482072, + 0.011322529, + 0.036461752, + 0.066346884, + -0.05434455, + 0.008740993, + 0.012066104, + -0.038101126, + 0.0069316486, + 0.051146947, + 0.07740579, + -0.122950904, + 0.016380342, + 0.049568996, + 0.031634904, + -0.039637603, + 0.0016715266, + 0.009577405, + -0.032646418, + -0.033988595, + -0.13329837, + 0.0072566303, + -0.010266605, + 0.038557075, + -0.09338859, + -0.041706774, + 0.069941126, + -0.026323376, + -0.14971305, + 0.13445398, + 0.03748492, + 0.052825302, + 0.0450506, + 0.018712776, + 0.05444322, + 0.017282845, + -0.032480195, + 0.04614526, + -0.046711974, + -0.030566413, + -0.01820007, + -0.04869831, + 0.033051647, + -0.0038142777, + 0.04999665, + -0.058270358, + -0.010011706, + 0.010643473, + -0.040113144, + -0.0015507729, + 0.060854245, + -0.045562096, + 0.049257778, + 0.02612153, + 0.01981428, + -0.001660993, + 0.059509434, + -6.525298e-33, + 0.063519135, + 0.0030875143, + 0.028961418, + 0.1733713, + 0.0029763067, + 0.027727291, + -0.0951315, + -0.031186627, + 0.026689058, + -0.010807322, + 0.023850724, + 0.023777472, + -0.031174092, + 0.049501278, + -0.025049716, + 0.10175924, + -0.07919064, + -0.0032249284, + 0.042915843, + 0.09483459, + -0.06652636, + 0.006303593, + 0.02220902, + 0.06999181, + -0.0074810013, + -0.0017734945, + 0.027008688, + -0.07534615, + 0.114036545, + 0.008552313, + -0.023737878, + -0.04694563, + 0.014472103, + 0.019855395, + -0.0046694353, + 0.0013555645, + -0.034298304, + -0.054142635, + -0.09419824, + -0.028909719, + -0.018876282, + 0.0457315, + 0.04761082, + -0.0030971593, + -0.033264168, + -0.013539523, + 0.051041685, + 0.031110944, + 0.015244497, + 0.054158635, + -0.08499706, + 0.013360703, + -0.04759633, + 0.07101136, + -0.0131114535, + -0.0023818254, + 0.050331973, + 
-0.041642286, + -0.01419894, + 0.032463223, + 0.0053973934, + 0.091275506, + 0.0044798073, + -0.018260129, + -0.015278888, + -0.046306957, + 0.038750377, + 0.014729783, + 0.05204642, + 0.0017938613, + -0.014963651, + 0.027101943, + 0.031203475, + 0.023725478, + -0.004601222, + 0.03617344, + 0.06679477, + -0.0018401983, + 0.021265576, + -0.057589985, + 0.019155758, + 0.031437635, + -0.018444614, + -0.04085069, + 0.10393101, + 0.011960795, + -0.014898805, + -0.10520497, + -0.012302656, + -0.00043837292, + -0.09508398, + 0.058318105, + 0.042576887, + -0.025066672, + -0.094555676, + 4.0072287e-33, + 0.1322281, + 0.0053512393, + -0.03312536, + -0.09096454, + -0.031562407, + -0.033949774, + -0.07205118, + 0.1259232, + -0.08333555, + 0.052797858, + 0.001077506, + 0.022004265, + 0.10402767, + 0.013034249, + 0.04091762, + 0.018705815, + 0.11424037, + 0.024799824, + 0.014582492, + 0.006205516, + -0.011202356, + -0.035756435, + -0.03800272, + 0.011251353, + -0.0512988, + 0.007890417, + 0.06736164, + 0.0033359542, + -0.09285096, + 0.03704081, + -0.022326592, + 0.039967872, + -0.030748183, + -0.011446819, + -0.014453254, + 0.02498229, + -0.097532175, + -0.035378877, + -0.03757795, + -0.010181498, + -0.06392041, + 0.025538994, + 0.02061816, + 0.03757256, + -0.1043548, + -0.028326731, + -0.05209465, + 0.0128473425, + -0.051238894, + -0.029034877, + -0.09633617, + -0.042309195, + 0.067165054, + -0.030870603, + -0.010357507, + 0.027381465, + -0.028105576, + 0.010302046, + 0.04306986, + 0.022315372, + 0.007954779, + 0.056068663, + 0.04071972, + 0.09293905, + 0.016536433, + -0.053764775, + 0.00047211433, + 0.050708972, + 0.042510226, + -0.029195962, + 0.009274875, + -0.010647389, + -0.037209682, + 0.002267011, + -0.030304702, + 0.0745741, + 0.0026207205, + -0.017582772, + 0.0028797672, + 0.038404796, + 0.00723137, + 0.045613218, + 0.03998252, + 0.014209623, + -0.0142997475, + 0.05850862, + 0.03630791, + 0.055294298, + -0.020075988, + -0.08041808, + -0.030250112, + -0.014920701, + 
0.022349516, + 0.011911506, + -0.06903851, + -1.8806734e-08, + -0.078480355, + 0.046674173, + -0.023920896, + 0.0634942, + 0.02396477, + 0.0014517035, + -0.090798445, + -0.06684978, + -0.0801405, + 0.005503192, + 0.053675175, + 0.104841895, + -0.066848256, + 0.015522683, + 0.067097165, + 0.070832625, + -0.03197915, + 0.020843629, + -0.0219202, + -0.0073016756, + -0.010645817, + 0.0040983153, + 0.03313765, + -0.0790081, + 0.03878132, + -0.075230986, + -0.015732396, + 0.0060099233, + 0.0051297406, + -0.061492138, + 0.04202211, + 0.09544608, + -0.04318599, + 0.014424486, + -0.10617826, + -0.027963417, + 0.011034413, + 0.069576606, + 0.06689785, + -0.07479674, + -0.07851099, + 0.042766396, + -0.034639932, + -0.10607304, + -0.03577663, + 0.051540814, + 0.068673156, + -0.049959548, + 0.015460458, + -0.064520314, + -0.076010585, + 0.026035817, + 0.07440218, + -0.012396022, + 0.13329679, + 0.074770845, + 0.05134284, + 0.020977058, + -0.026776016, + 0.08894323, + 0.039937407, + -0.04102053, + 0.03194075, + 0.018113315 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/517505777888.json b/tests/integration/recordings/responses/517505777888.json index f556ba743..41030cdac 100644 --- a/tests/integration/recordings/responses/517505777888.json +++ b/tests/integration/recordings/responses/517505777888.json @@ -18,390 +18,390 @@ "data": [ { "embedding": [ - 0.019099757, - -0.020513054, - -0.07147724, - -0.02305817, - -0.06570441, - -0.0057285326, - -0.029366547, - -0.031833924, - -0.015779832, - -0.03914512, - 0.02689602, - -0.064181775, - 0.013521624, - 0.050362427, - -0.031129995, - -0.08321027, - -0.031968866, - 0.074996136, - -0.016394366, - -0.0013953616, - 0.038505327, - -0.03440395, - -0.004868513, - -0.03093635, - 0.051909875, - 0.0091652395, - 0.0072081746, - 0.066338904, - 0.024595087, - -0.047721148, - 0.0376462, - -0.04257363, - 0.078928985, - 0.048257265, - 0.1338569, - 0.013975464, - 0.03242688, - -0.08888101, - -0.0141724255, - 
0.035531398, - -0.024727112, - -0.028608425, - 0.047635823, - 0.026230432, - 0.048455644, - 0.066589415, - -0.013602744, - 0.07181793, - -0.073052436, - -0.05030391, - 0.0039422787, - 0.033050794, - -0.047844775, - -0.017648827, - 0.010261714, - -0.105268046, - -0.010029887, - 0.014589762, - -0.05330117, - 0.0603304, - -0.10082026, - 0.0113420375, - -0.007233272, - 0.053468946, - -0.006834623, - 0.036973044, - 0.024037901, - 0.02391513, - -0.011360713, - -0.119559266, - -0.115714155, - -0.06674816, - -0.042340416, - 0.09301382, - 0.024868665, - 0.08405043, - 0.0030069647, - -0.06605422, - 0.027435942, - -0.03239928, - -0.025572078, - -0.06587331, - 0.0678087, - 0.09763614, - 0.07363481, - 0.034110706, - 0.056513038, - 0.07671608, - -0.05176071, - 0.05367774, - 0.00541266, - 0.015987717, - 0.0035527307, - 0.063338846, - -0.015986515, - 0.052941773, - 0.11543519, - 0.05519716, - 0.037675396, - 0.08086703, - 0.035557747, - -0.07983684, - -0.012073549, - -0.076086745, - -0.06961062, - -0.017908957, - 0.1699312, - -0.0047792625, - 0.090708405, - -0.071956836, - 0.020046378, - -0.05956393, - -0.06314912, - -0.07718947, - 0.015107324, - -0.05031658, - -0.05448986, - -0.023088248, - -0.035414543, - -0.030637579, - -0.053294946, - -0.06745031, - -0.08055133, - 0.0028445483, - -0.011376515, - -0.029895633, - 0.024240365, - -1.5095563e-33, - -0.029858422, - -0.00030224613, - 0.0030705915, - 0.023098653, - -0.04807201, - -0.0027389736, - -0.03748221, - 0.016176483, - -0.029994667, - 0.015707478, - 0.0096614035, - -0.039872784, - -0.029488137, - 0.03840971, - -0.0052404203, - 0.06854292, - -0.007897781, - -0.0018805856, - -0.0352267, - 0.036267247, - 0.05868197, - 0.023763478, - 0.044439625, - -0.02601301, - -0.025314424, - -0.02679121, - -0.023682553, - -0.09437374, - 0.0016686164, - 0.0065181926, - -0.097118795, - -0.053507585, - -0.08239408, - 0.023490923, - -0.02402227, - 0.015966628, - 0.0050696856, - 0.030458245, - -0.08839895, - 0.11425429, - 0.028386213, - 0.0298561, - 
0.02285531, - 0.01873392, - 0.05632994, - -0.020208938, - -0.0006685065, - -0.08638551, - 0.020276291, - -0.0039841584, - 0.0009751431, - 0.06544227, - -0.03650517, - 0.032318577, - 0.023104826, - 0.04446683, - 0.09645086, - -0.072731785, - 0.033722512, - 0.042799864, - -0.05276349, - 0.00033437353, - 0.061005846, - -0.019637244, - -0.02327577, - -0.1160437, - 0.007917702, - -0.12529376, - 0.017027825, - 0.013484424, - -0.030528279, - -0.024288423, - 0.006258758, - -0.015579525, - -0.07281456, - 0.012983996, - 0.01599799, - 0.0051952074, - -0.002588768, - -0.059567206, - 0.063699834, - -0.0019145603, - 0.018687418, - -0.009282711, - -0.05884746, - -0.03251431, - -0.0095772855, - -0.047396615, - 0.020575106, - -0.0071638324, - 0.050119117, - 0.016082546, - -0.0058797863, - -0.07660506, - 0.082072616, - 1.6049304e-33, - -0.0056975842, - 0.06717823, - -0.01155973, - 0.055897184, - -0.08883816, - -0.03651865, - 0.12133234, - 0.028983265, - 0.022465894, - 0.047318526, - 0.07625107, - -0.07938655, - 0.0020323857, - -0.023503296, - -0.029780442, - -0.048816763, - -0.034901213, - 0.06463424, - 0.05149456, - 0.008271398, - -0.031762894, - 0.097970895, - 0.008115042, - 0.010324485, - 0.059439637, - 0.051759075, - 0.04295602, - 0.006951762, - 0.027330121, - 0.039248228, - 0.062386345, - 0.05181691, - 0.0053548445, - 0.059656292, - -0.008941856, - -0.013595369, - 0.08731477, - 0.028409526, - -0.0068070823, - 0.052146304, - 0.04951788, - 0.055161525, - -0.016772978, - 0.07788952, - 0.02612108, - 0.031371117, - 0.011792192, - -0.034147624, - 0.052822903, - 0.0035044928, - 0.098160714, - 0.029717103, - -0.031353023, - -0.012088347, - 0.018629983, - -0.03261934, - -0.09641058, - 0.033934057, - -0.078907624, - -0.008301054, - -0.04919879, - 0.0200944, - 0.061727397, - -0.018450737, - -0.033557754, - -0.09088319, - 0.021116594, - -0.022466624, - -0.011860241, - -0.04879352, - 0.04824181, - -0.0729504, - -0.021986347, - 0.062490568, - 0.02329735, - -0.052139174, - -0.05413272, - 
0.062326364, - 0.052311692, - 0.051399846, - -0.024238104, - -0.018776463, - -0.01662191, - 0.093347155, - 0.00853553, - 0.06343568, - 0.0193722, - 0.047052696, - -0.0058736033, - -0.0034484447, - 0.079545766, - 0.102156945, - 0.015278317, - 0.040921766, - 0.038883872, - -1.2710007e-08, - -0.019322075, - -0.12182595, - -0.04798032, - -0.05338353, - -0.113173604, - 0.05179994, - -0.104975395, - -0.08526829, - 0.0062153414, - -0.029902961, - 0.064573385, - -0.028757203, - -0.06474069, - -0.024915313, - 0.002619679, - -0.008791377, - 0.03023946, - 0.009847454, - 0.004436367, - 0.085081235, - -0.026139142, - 0.11358947, - -0.004590704, - -0.03662597, - -0.09077296, - 0.081458576, - 0.012074041, - 0.07286008, - 0.004093267, - -0.050678167, - 0.06875128, - 0.029115168, - 0.014813955, - -0.11862927, - -0.0504244, - 0.053776395, - 0.04568957, - 0.07408053, - 0.02851353, - 0.039401993, - 0.029147856, - -0.035721682, - -0.091308504, - -0.047723882, - -0.00082008925, - -0.073683135, - 0.010977384, - 0.015688991, - -0.035924956, - -0.0811892, - 0.020371897, - -0.045275442, - -0.024963016, - 0.0011709725, - 0.00041111733, - -0.026408581, - -0.03244672, - 0.0034135028, - -0.0070261946, - 0.024263272, - 0.07635933, - 0.03955913, - 0.036027964, - -0.07081866 + 0.019109152, + -0.0205217, + -0.071471564, + -0.023057504, + -0.06572786, + -0.0057331678, + -0.029395059, + -0.031822033, + -0.015748156, + -0.039123703, + 0.02694331, + -0.0641754, + 0.013510709, + 0.050364953, + -0.03114308, + -0.08322274, + -0.03192984, + 0.074970365, + -0.016377378, + -0.0013804765, + 0.03850419, + -0.03441017, + -0.0048610102, + -0.03094053, + 0.051915165, + 0.009193639, + 0.0071807485, + 0.066353165, + 0.024559105, + -0.04767663, + 0.0376255, + -0.042586852, + 0.078906916, + 0.04827334, + 0.13389648, + 0.013978803, + 0.03242126, + -0.08890431, + -0.014188366, + 0.03553346, + -0.02476171, + -0.028628638, + 0.047652308, + 0.026259335, + 0.048472118, + 0.06663718, + -0.013584004, + 0.071824096, + 
-0.073066786, + -0.050326068, + 0.0039502876, + 0.03300394, + -0.047816053, + -0.017657546, + 0.010284664, + -0.10525716, + -0.010034394, + 0.014627846, + -0.053289402, + 0.060343288, + -0.10079798, + 0.011359217, + -0.007258805, + 0.05346498, + -0.0068726647, + 0.03697505, + 0.024016414, + 0.023924585, + -0.011357761, + -0.119573325, + -0.115692526, + -0.06673285, + -0.04233929, + 0.09302018, + 0.02486003, + 0.084047645, + 0.0030104683, + -0.06605523, + 0.027435688, + -0.032412402, + -0.025584543, + -0.06590182, + 0.067799605, + 0.0976311, + 0.07360619, + 0.034108408, + 0.056534845, + 0.076705806, + -0.05179011, + 0.053681813, + 0.0054462817, + 0.015972052, + 0.0035656213, + 0.06333522, + -0.01597322, + 0.05295729, + 0.11539089, + 0.055200845, + 0.037667733, + 0.08083974, + 0.035557732, + -0.07982552, + -0.012100598, + -0.07612801, + -0.0695667, + -0.017815348, + 0.16996554, + -0.0048157335, + 0.09073964, + -0.07196438, + 0.020009195, + -0.05956153, + -0.06312686, + -0.07716358, + 0.0150949685, + -0.050339524, + -0.05444592, + -0.023078114, + -0.035431463, + -0.030625492, + -0.053284056, + -0.06745872, + -0.08049862, + 0.002800386, + -0.0114065055, + -0.029938627, + 0.024243163, + -1.5107368e-33, + -0.02984805, + -0.00033025863, + 0.0030491, + 0.023082128, + -0.04808977, + -0.0027841914, + -0.037461873, + 0.016201235, + -0.02998979, + 0.015712254, + 0.009664366, + -0.03984875, + -0.029493092, + 0.03837007, + -0.005226541, + 0.06857773, + -0.007891026, + -0.0019036188, + -0.035219382, + 0.03627955, + 0.05867878, + 0.023777487, + 0.044425115, + -0.025999734, + -0.025318418, + -0.02685328, + -0.02368557, + -0.094386704, + 0.0016880591, + 0.0065193563, + -0.09711005, + -0.053493332, + -0.08241291, + 0.023502836, + -0.02407441, + 0.015992055, + 0.0050546136, + 0.030476829, + -0.088438906, + 0.11427086, + 0.028378993, + 0.02985018, + 0.022821706, + 0.018776013, + 0.056330692, + -0.020254886, + -0.00070521404, + -0.0864014, + 0.020228866, + -0.0039839754, + 0.0010032665, 
+ 0.065425254, + -0.036518592, + 0.032341316, + 0.023112345, + 0.044507477, + 0.09644409, + -0.07272818, + 0.03370691, + 0.042783204, + -0.052776046, + 0.0003352446, + 0.061005518, + -0.019623613, + -0.023274273, + -0.11602989, + 0.007926991, + -0.12529127, + 0.017030548, + 0.013484081, + -0.030528491, + -0.024298145, + 0.006284904, + -0.015568167, + -0.072781205, + 0.012985074, + 0.015977127, + 0.0051657534, + -0.0026022948, + -0.059578825, + 0.06372584, + -0.0019363016, + 0.018695941, + -0.009242735, + -0.05887247, + -0.032524884, + -0.009591115, + -0.047377545, + 0.020585002, + -0.007134836, + 0.050135154, + 0.016087264, + -0.0058878902, + -0.07661024, + 0.0820671, + 1.6053074e-33, + -0.0056476775, + 0.06719423, + -0.011510322, + 0.05586423, + -0.08886697, + -0.036528286, + 0.12134926, + 0.028969096, + 0.022419011, + 0.047327086, + 0.07621525, + -0.07937209, + 0.0020504447, + -0.023489932, + -0.029759271, + -0.04879825, + -0.034876924, + 0.06461666, + 0.051493492, + 0.008284975, + -0.031793926, + 0.098015875, + 0.008122038, + 0.01032072, + 0.059404474, + 0.05176487, + 0.042960417, + 0.0069373515, + 0.027306866, + 0.039226852, + 0.062416088, + 0.051797673, + 0.0053232666, + 0.05965781, + -0.008935817, + -0.0135501, + 0.08726531, + 0.028408607, + -0.006820522, + 0.052098107, + 0.049510423, + 0.055176627, + -0.016774576, + 0.077848226, + 0.026121203, + 0.031311177, + 0.011812256, + -0.0341528, + 0.052825138, + 0.003484205, + 0.09811821, + 0.029693138, + -0.031354938, + -0.012068096, + 0.018686052, + -0.032609653, + -0.09638639, + 0.033928476, + -0.07897009, + -0.008300913, + -0.04915284, + 0.02006342, + 0.061743837, + -0.018412542, + -0.033583082, + -0.090903476, + 0.021116566, + -0.022445552, + -0.011814237, + -0.048816226, + 0.048287436, + -0.07294675, + -0.02198573, + 0.062477604, + 0.023308119, + -0.052141402, + -0.05409648, + 0.062339973, + 0.052301563, + 0.051384836, + -0.02426406, + -0.018824687, + -0.01660311, + 0.09330242, + 0.008502433, + 0.063408315, + 
0.019377569, + 0.047027417, + -0.0058769877, + -0.0034505578, + 0.07956527, + 0.10210641, + 0.015302805, + 0.04089992, + 0.038895626, + -1.2710905e-08, + -0.019304764, + -0.1217849, + -0.047983564, + -0.053382736, + -0.113197215, + 0.05181196, + -0.10498226, + -0.08524135, + 0.0061870585, + -0.029899841, + 0.064561576, + -0.028730206, + -0.064735174, + -0.024887148, + 0.0026119591, + -0.008796896, + 0.030246036, + 0.009807871, + 0.0044631795, + 0.0851423, + -0.026132204, + 0.11360852, + -0.0045760865, + -0.036643907, + -0.09078616, + 0.081466354, + 0.012066122, + 0.07288108, + 0.004079195, + -0.05064171, + 0.068772145, + 0.029108258, + 0.014786602, + -0.11868081, + -0.05042858, + 0.05376578, + 0.04570744, + 0.074074544, + 0.028540619, + 0.03937392, + 0.0291862, + -0.035710927, + -0.09132387, + -0.047720414, + -0.00082342024, + -0.073688805, + 0.011024812, + 0.015703982, + -0.03590976, + -0.08121826, + 0.020365681, + -0.045287356, + -0.024955628, + 0.001167751, + 0.00037544646, + -0.026392939, + -0.032434102, + 0.003407464, + -0.007060387, + 0.024250468, + 0.076347135, + 0.039537415, + 0.036043648, + -0.07085338 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/559296e84820.json b/tests/integration/recordings/responses/559296e84820.json index 607767a63..46ebe6848 100644 --- a/tests/integration/recordings/responses/559296e84820.json +++ b/tests/integration/recordings/responses/559296e84820.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-471", + "id": "chatcmpl-275", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245121, + "created": 1759437797, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/55ae40168378.json b/tests/integration/recordings/responses/55ae40168378.json new file mode 100644 index 000000000..8d8407727 --- /dev/null +++ 
b/tests/integration/recordings/responses/55ae40168378.json @@ -0,0 +1,366 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use one of the provided functions/tools to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in 
celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.216374Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.257898Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.299052Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.340155Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + 
"__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.381269Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.422347Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.463428Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.504785Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.548668Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + 
"response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.589697Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.631027Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.672172Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.713652Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.755751Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + 
"prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.796948Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.838368Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.879363Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:00.920412Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-02T02:55:00.961636Z", + "done": true, + "done_reason": "stop", + "total_duration": 983443875, + "load_duration": 129661959, + "prompt_eval_count": 377, + "prompt_eval_duration": 107132333, + "eval_count": 19, + "eval_duration": 745847667, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/590d43ed64b8.json b/tests/integration/recordings/responses/590d43ed64b8.json index 136f240d3..32c2e58e5 100644 --- a/tests/integration/recordings/responses/590d43ed64b8.json +++ b/tests/integration/recordings/responses/590d43ed64b8.json @@ -18,390 +18,390 @@ "data": [ { "embedding": [ - 0.050928835, - 0.03843035, - -0.055596404, - -0.1059845, - 0.06945118, - -0.08052125, - -0.025887776, - -0.045172054, - 0.06875915, - 0.01652947, - -0.0011730668, - 0.023417989, - -0.0033977597, - 0.06804529, - -0.022007054, - -0.014133858, - 0.12357166, - -0.06538498, - -0.08264784, - 0.042988714, - -0.039530188, - 0.05546846, - -0.008847637, - 0.020928107, - 0.016257003, - 0.0963241, - -0.022833107, - 0.09176138, - 0.06406277, - -0.062280413, - 0.010846775, - 0.07830326, - 0.08847168, - -0.008453102, - -0.075440355, - 0.048030853, - 0.0042642253, - 0.037893716, - 0.0023323877, - 0.032253597, - 0.0047477684, - -0.07042877, - -0.0651552, - 0.061071083, - 0.021506561, - 0.10113442, - -0.07538611, - -0.0407162, - -0.0055698017, - -0.003700082, - -0.021267522, - -0.018197505, - -0.033238053, - -0.015680185, - 0.0032980912, - 0.037441716, - -0.02103593, - 0.052548602, - 0.10207184, - -0.018667448, - 0.036124475, - 0.08958934, - 0.050691247, - 0.019807478, - 0.102209404, - -0.0590646, - -0.045566943, - -0.024122052, - -0.059902284, - -0.097920865, - -0.0020646898, - 0.032239985, - 0.048603263, - 0.080615476, - 0.022587052, - 0.0005647973, - -0.0015346111, - 0.009996407, - -0.08974319, - 0.023848958, - -0.0152271725, - -0.020556787, - 0.085268654, - -0.080245204, - -0.0021987888, - 0.064997524, - 
-0.023079548, - -0.061999504, - -0.06548528, - -0.029944805, - 0.004539428, - 0.09720334, - 0.09151462, - -0.0059590363, - -0.04822175, - -0.011798011, - -0.031697348, - -0.010327684, - 0.02968527, - 0.103371136, - -0.029089179, - 0.0055756853, - -0.030742139, - -0.011057862, - -0.03863044, - -0.015891504, - 0.00083265523, - 0.03479572, - 0.0039244313, - -0.020057123, - -0.048189417, - 0.026513426, - -0.061180107, - -0.04695217, - 0.021450046, - -0.04841946, - 0.022005452, - 0.015729656, - 0.056378406, - 0.055330493, - 0.037143476, - -0.088711694, - 0.011780864, - 0.0064585637, - -0.020630004, - -0.05936413, - 0.012287869, - -2.4293852e-33, - 0.06838332, - -0.053025596, - 0.011507658, - 0.06950136, - 0.01331995, - 0.0020193695, - -0.02080692, - 0.028949803, - 0.034665402, - -0.0327198, - 0.000949148, - 0.008664251, - 0.0076103383, - -0.024554089, - 0.030275982, - -0.034142904, - -0.031511948, - 0.11051145, - 0.034964334, - 0.045093905, - 0.0004536878, - 0.0514407, - 0.015040795, - -0.008992289, - 0.023123777, - 0.051383648, - -0.004154813, - 0.0047568153, - -0.016239677, - -0.025685828, - -0.02406427, - -0.009563573, - 0.050677244, - -0.058350526, - 0.049024463, - 0.079643525, - 0.036008406, - -0.06540527, - -0.035393585, - -0.07027483, - -0.009768918, - -0.0318898, - -0.04104297, - -0.041093245, - -0.036317065, - 0.06686649, - 0.016687784, - -0.048496265, - -0.015432587, - -0.0004885036, - 0.032693844, - -0.0108784195, - 0.016624164, - -0.057286467, - 0.008053993, - 0.008824837, - -0.061545905, - -0.0108399745, - 0.07171203, - 0.08609233, - 0.014049224, - 0.014907912, - -0.09828269, - -0.046647478, - 0.03361861, - 0.064744, - -0.007506857, - 0.025442023, - 0.04172483, - -0.033108808, - -0.01457406, - 0.024897074, - 0.04562778, - -0.042942565, - -0.040469114, - -0.06307098, - -0.02242408, - 0.010597915, - -0.03252762, - -0.03145859, - 0.00820347, - 0.021108724, - 0.009504359, - -0.08292171, - -0.02136818, - 0.008753057, - 0.06017692, - -0.062192526, - 0.0045083114, 
- 0.056810796, - -0.012999816, - 0.01868933, - -0.008973792, - -0.076788835, - 0.051616713, - 1.6926322e-33, - -0.12587416, - 0.011702123, - -0.07986232, - 0.023053063, - 0.029265704, - 0.08719514, - 0.06907015, - 0.03254812, - 0.047793373, - 0.13217501, - 0.031299006, - -0.012535935, - 0.0035618816, - -0.0163916, - -0.03853783, - 0.01597904, - 0.09169072, - 0.04756113, - -0.054968182, - 0.067977056, - 0.017965809, - 0.11863936, - -0.0693313, - 0.043811284, - 0.041538227, - -0.017813183, - 0.051730298, - 0.067949936, - 0.080519445, - 0.0053662807, - 0.088820346, - -0.036024984, - -0.077107176, - -0.09097472, - -0.09598897, - -0.09376241, - -0.06202675, - 0.06723746, - -0.00064578716, - 0.029109621, - 0.08179942, - -0.06487821, - -0.050387383, - -0.0023782111, - -0.026097134, - -0.0076310094, - 0.011977006, - -0.08573459, - 0.041102324, - 0.024716543, - -0.022249049, - -0.11560483, - 0.0067691505, - -0.045894623, - -0.0637051, - 0.05357708, - 0.00577345, - 0.06321221, - 0.004861166, - -0.05710446, - 0.04190449, - 0.022335436, - -0.1471083, - 0.026351552, - 0.10623104, - -0.005882123, - 0.019992633, - 0.034953646, - -0.03338853, - -0.038839623, - -0.076065235, - -0.11174125, - -0.038965553, - -0.102677576, - 0.04711777, - -0.049392425, - 0.07477134, - 0.04174287, - -0.031087497, - 0.0033754015, - 0.055780858, - -0.03184862, - -0.02541985, - 0.05011349, - 0.03596857, - 0.091428444, - -0.07583281, - -0.050592963, - 0.0074175335, - -0.0013578966, - -0.050366234, - -0.0015045146, - 0.0054275827, - 0.07685381, - 0.014169269, - -1.8297998e-08, - 0.029916301, - -0.057940822, - -0.06847671, - 0.026218578, - -0.0034848938, - 0.113768935, - 0.056854554, - -0.093155205, - 0.0028038986, - 0.10895503, - -0.033018846, - 0.0050494163, - -0.043625794, - -0.048996136, - 0.0118943965, - 0.059736334, - -0.08662527, - -0.052732464, - 0.026333557, - 0.042200398, - -0.0035924676, - 0.037994288, - 0.022570506, - -0.061503205, - 0.012634007, - 0.040854853, - -0.084876895, - 0.041194208, - 
-0.038179893, - 0.008360482, - 0.010148832, - 0.024984034, - -0.012506054, - -0.045101274, - 0.010266152, - -0.046285193, - 0.061415587, - 0.016212178, - -0.0011856663, - 0.0074200486, - -0.019432405, - -0.068008475, - 0.05477893, - 0.0964552, - -0.04710964, - 0.060082186, - 0.003054353, - -0.08875195, - 0.03727946, - -0.0099389665, - 0.003561616, - -0.07834196, - 0.021697106, - -0.013061282, - 0.0725091, - -0.06500139, - -0.029938946, - -0.017758802, - 0.033857197, - 0.029207738, - 0.08792652, - 0.00846041, - 0.06444677, - -0.016519535 + 0.050927628, + 0.038399037, + -0.05559374, + -0.105984606, + 0.06944504, + -0.08054001, + -0.025946686, + -0.045175657, + 0.068730615, + 0.016510814, + -0.0011700827, + 0.023414683, + -0.0034143464, + 0.06804153, + -0.021997927, + -0.014162646, + 0.12356902, + -0.06536738, + -0.082627006, + 0.04300477, + -0.039514318, + 0.055434275, + -0.008866895, + 0.020934915, + 0.016280092, + 0.09630312, + -0.022835929, + 0.09175565, + 0.06409549, + -0.06226981, + 0.010888244, + 0.07833004, + 0.08844764, + -0.008459277, + -0.07542651, + 0.04800223, + 0.0042286967, + 0.037884884, + 0.0023502677, + 0.032233667, + 0.0047689923, + -0.070404515, + -0.06513966, + 0.061046362, + 0.021522248, + 0.10113185, + -0.07537441, + -0.04074795, + -0.0055522234, + -0.0037093374, + -0.021283673, + -0.018193243, + -0.03323253, + -0.015658593, + 0.0032862085, + 0.037399907, + -0.021028537, + 0.052572608, + 0.10211333, + -0.018634265, + 0.03612266, + 0.08958185, + 0.050681055, + 0.019839589, + 0.10220134, + -0.059074707, + -0.045562137, + -0.024107283, + -0.059917513, + -0.09795064, + -0.002078402, + 0.032211803, + 0.04863422, + 0.08062527, + 0.022614514, + 0.0005379622, + -0.0015465368, + 0.010018953, + -0.089729026, + 0.023838207, + -0.015227461, + -0.020540234, + 0.08525423, + -0.08025672, + -0.002200058, + 0.0649954, + -0.023069935, + -0.06201302, + -0.06545048, + -0.029986514, + 0.0045501734, + 0.09718718, + 0.09153336, + -0.0059684636, + -0.048185453, + 
-0.011855243, + -0.03170323, + -0.010363732, + 0.029717747, + 0.103405535, + -0.029072085, + 0.005597891, + -0.03075466, + -0.011073092, + -0.038647823, + -0.01590583, + 0.0008562756, + 0.03479237, + 0.0039463183, + -0.020063022, + -0.048164852, + 0.026510539, + -0.061183933, + -0.046969693, + 0.02144617, + -0.048452575, + 0.02205527, + 0.015723849, + 0.056344535, + 0.055321235, + 0.037136998, + -0.08872732, + 0.011813868, + 0.0064246035, + -0.020590257, + -0.059401207, + 0.012338125, + -2.4301395e-33, + 0.068363585, + -0.05303797, + 0.011494271, + 0.06953355, + 0.013304427, + 0.0020351785, + -0.020783585, + 0.028951883, + 0.034663863, + -0.03274387, + 0.00095708756, + 0.008672852, + 0.007618213, + -0.024579093, + 0.030253874, + -0.034167152, + -0.0315152, + 0.1105276, + 0.03499844, + 0.045135163, + 0.00044455956, + 0.051429555, + 0.015050582, + -0.009024664, + 0.023132037, + 0.05141033, + -0.00417506, + 0.004720958, + -0.016197585, + -0.025692327, + -0.024077175, + -0.00953031, + 0.05060433, + -0.058328744, + 0.04903431, + 0.07964924, + 0.03599398, + -0.065374464, + -0.035382472, + -0.07028972, + -0.009750123, + -0.031909473, + -0.04101604, + -0.041144423, + -0.036323845, + 0.06685511, + 0.016679594, + -0.048498012, + -0.015474575, + -0.00048608257, + 0.03267068, + -0.010890426, + 0.016646467, + -0.057286758, + 0.008073807, + 0.008808943, + -0.061580453, + -0.010815387, + 0.0717443, + 0.08607838, + 0.014073375, + 0.014896061, + -0.098295614, + -0.046653833, + 0.033601493, + 0.0647405, + -0.007525925, + 0.025440095, + 0.04171436, + -0.033113986, + -0.014553822, + 0.024878975, + 0.045614205, + -0.042929318, + -0.040504646, + -0.06304663, + -0.022389242, + 0.010583584, + -0.032525852, + -0.03146621, + 0.0081922775, + 0.021094568, + 0.0095269885, + -0.08290188, + -0.021351986, + 0.008777032, + 0.060185786, + -0.062182017, + 0.004518251, + 0.05684528, + -0.013033095, + 0.01867297, + -0.008998785, + -0.076766245, + 0.051622886, + 1.6926977e-33, + -0.12588808, + 
0.011676749, + -0.079886116, + 0.02304184, + 0.029238446, + 0.08721121, + 0.06906221, + 0.032533444, + 0.047794122, + 0.13212898, + 0.03129717, + -0.0125368, + 0.0035920327, + -0.016413208, + -0.038557872, + 0.016005918, + 0.09166447, + 0.047558285, + -0.054981478, + 0.06797876, + 0.017968502, + 0.118666455, + -0.069318265, + 0.043814093, + 0.04150938, + -0.017812226, + 0.051738504, + 0.06795029, + 0.080493495, + 0.005386888, + 0.08878265, + -0.036075104, + -0.07708273, + -0.09101018, + -0.09597232, + -0.0937606, + -0.06200779, + 0.06722552, + -0.0006647803, + 0.029067127, + 0.08179574, + -0.06488274, + -0.050375167, + -0.002403243, + -0.026110265, + -0.007630271, + 0.011972527, + -0.08573929, + 0.04107404, + 0.024723932, + -0.02222756, + -0.11560156, + 0.006753066, + -0.04589066, + -0.06369223, + 0.053635046, + 0.005769477, + 0.06325056, + 0.0048679966, + -0.057087842, + 0.041931894, + 0.022344982, + -0.14709935, + 0.026361033, + 0.106274396, + -0.0059068515, + 0.020035667, + 0.034950804, + -0.03342695, + -0.03884034, + -0.076072656, + -0.11173452, + -0.038953967, + -0.10270519, + 0.04714134, + -0.049391687, + 0.074747935, + 0.041724026, + -0.031083144, + 0.0033830043, + 0.055804495, + -0.031882074, + -0.02541756, + 0.050101582, + 0.035991114, + 0.09143438, + -0.07581111, + -0.050589707, + 0.0074097887, + -0.0014020415, + -0.05036443, + -0.0015289022, + 0.005471816, + 0.07689256, + 0.014164922, + -1.8297508e-08, + 0.029913928, + -0.057959806, + -0.06846765, + 0.026196472, + -0.0035178436, + 0.11374637, + 0.056845777, + -0.09315407, + 0.0027757618, + 0.10895455, + -0.033027817, + 0.005051668, + -0.043633904, + -0.048978273, + 0.011912417, + 0.059747256, + -0.08661686, + -0.052748058, + 0.026321623, + 0.042173225, + -0.0035451513, + 0.03797019, + 0.022595786, + -0.0614702, + 0.01268269, + 0.040893063, + -0.084825225, + 0.041167296, + -0.038163006, + 0.008364558, + 0.01014753, + 0.024994388, + -0.012504467, + -0.045078665, + 0.0102669485, + -0.046302866, + 
0.061438397, + 0.016235871, + -0.0011558776, + 0.007455159, + -0.019448454, + -0.06798961, + 0.05472832, + 0.09646006, + -0.04711737, + 0.060088705, + 0.0030213061, + -0.08877283, + 0.037262574, + -0.009947699, + 0.0035697597, + -0.07833652, + 0.02169359, + -0.013075168, + 0.072521746, + -0.0649658, + -0.029920656, + -0.017777385, + 0.033904497, + 0.02919506, + 0.08793891, + 0.008437021, + 0.064442866, + -0.01656208 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/5e8bf88b3c20.json b/tests/integration/recordings/responses/5e8bf88b3c20.json new file mode 100644 index 000000000..c47ffe8fb --- /dev/null +++ b/tests/integration/recordings/responses/5e8bf88b3c20.json @@ -0,0 +1,804 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_9wfu7bke", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_9wfu7bke", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " was", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": 
"llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " unable", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " find", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + 
"finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " 
liquid", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " Celsius", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437824, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " could", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " not", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " be", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " located", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " my", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": " database", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-988", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437825, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/63aa4590a38a.json 
b/tests/integration/recordings/responses/63aa4590a38a.json index 9e3b275db..ae20dce36 100644 --- a/tests/integration/recordings/responses/63aa4590a38a.json +++ b/tests/integration/recordings/responses/63aa4590a38a.json @@ -19,390 +19,390 @@ "data": [ { "embedding": [ - 0.043770123, - 0.021501394, - -0.081300564, - 0.010615138, - -0.07908651, - -0.03219175, - 0.13090447, - 0.042329222, - -0.11600146, - -0.07588096, - 0.041826088, - -0.080617175, - 0.038125783, - -0.01069657, - 0.01577377, - -0.04196888, - 0.043099895, - -0.033355612, - 0.013571747, - -0.0103924, - 0.015561896, - -0.03786113, - -0.050319925, - -0.02566629, - -0.047868017, - -0.08717805, - 0.01685358, - -0.03676223, - 0.0063788705, - 0.020863743, - 0.11264443, - -0.0021451844, - -0.07911777, - 0.038758967, - 0.115321144, - -0.019753717, - 0.0067159277, - -0.02115779, - -0.0144774495, - -0.0027154125, - -0.034384295, - -0.052576542, - -0.030578543, - 0.04745372, - -0.024294367, - 0.01091144, - -0.03947583, - 0.07183755, - -0.020715859, - 0.018965777, - 0.04292474, - -0.007755194, - 0.0025708016, - -0.058263537, - 0.0117485095, - -0.022703577, - 0.001755438, - -0.012628832, - 0.030728007, - 0.017719304, - -0.061525322, - -0.036568273, - 0.025831668, - 0.025376469, - 0.012137967, - 0.009102949, - -0.027313529, - -0.093379095, - 0.0052120173, - 0.0074658697, - -0.07538, - 0.010161349, - -0.028439516, - 0.03026334, - 0.0036700817, - -0.022599109, - -0.037862476, - -0.08384314, - -0.0124443015, - -0.048889726, - 0.029131662, - -0.044443335, - -0.07518736, - -0.020938978, - 0.063386515, - 0.16294138, - 0.060580015, - -0.01281573, - -0.031040885, - 0.018372353, - 0.11225789, - 0.072922915, - -0.06272038, - -0.031792488, - -0.017476005, - 0.04846264, - -0.04116229, - -0.041834168, - -0.059919056, - 0.15907861, - -0.027786179, - -0.012492541, - 0.05599519, - -0.019895995, - 0.022076221, - 0.006363836, - 0.046413723, - -0.0731325, - 0.03326452, - 0.059475966, - -0.033314705, - 0.030761855, - 0.00819013, - 
-0.020254606, - 0.05658313, - -0.08153619, - 0.023402533, - 0.0060753864, - -0.07993489, - 0.013990512, - 0.052254565, - 0.027170746, - -0.049271967, - 0.02814688, - 0.019500777, - 0.054206643, - 0.082691684, - -1.8817448e-33, - 0.013630832, - -0.010863344, - 0.015899567, - 0.06938339, - -0.05113185, - 0.08995833, - 0.04450505, - 0.08101549, - 0.018903807, - -0.020960161, - -0.017933648, - -0.02174221, - 0.010988686, - 0.015100026, - 0.017031211, - 0.09433042, - 0.003454907, - 0.010199729, - -0.0446973, - 0.0018167854, - 0.015817188, - -0.06576281, - -0.004943305, - 0.004393494, - -0.019598262, - -0.092797264, - -0.025917865, - 0.04409669, - 0.054165967, - -0.007365383, - -0.021470547, - -0.03683317, - -0.091507494, - 0.08402351, - -0.01809901, - 0.0038072586, - 0.020236026, - 0.0439697, - -0.077322714, - 0.0057473024, - -0.054513566, - -0.024854423, - 0.075270385, - 0.034554463, - -0.08118007, - -0.12208905, - -0.0052893, - 0.0078005046, - 0.05028763, - 0.015558154, - -0.056349996, - 0.0398076, - 0.012997719, - -0.040145177, - 0.014409028, - -0.033200737, - -0.008437484, - -0.037582297, - -0.019651853, - 0.017285295, - -0.008976723, - -0.0018494898, - -0.0030671947, - 0.03046138, - -0.051143825, - -0.08688155, - -0.018344227, - -0.113307714, - 0.073259674, - 0.04602224, - 0.012651309, - -0.063435435, - -0.028471926, - 0.020155901, - -0.078830436, - -0.00069818215, - -0.03156303, - 0.123062745, - 0.0042949035, - -0.026413191, - 0.07838535, - -0.07747411, - -0.02126005, - 0.048919026, - 0.02919413, - -0.009296978, - -0.030687347, - -0.041037664, - -0.038565576, - -0.08043238, - 0.023225678, - 0.041928973, - -0.05812511, - 0.058555346, - 0.07633673, - 4.4510456e-34, - -0.019582625, - 0.040237214, - 0.01455587, - 0.034353998, - 0.043911777, - -0.023234777, - 0.0677493, - -0.030089214, - -0.09076478, - -0.019257858, - -0.02767876, - -0.00065146026, - 0.0043030144, - 0.05363546, - 0.04073387, - 0.03255476, - -0.10712685, - -0.050083157, - -0.016644027, - -0.0077649173, 
- -0.11153465, - 0.07478277, - -0.015999233, - -0.050547555, - -0.113217294, - -0.006174145, - 0.050873067, - -0.030284155, - 0.04314861, - 0.033020362, - 0.023671353, - 0.04654029, - -0.03415647, - 0.03614603, - 0.023047049, - -0.02677317, - 0.063607745, - 0.09978129, - 0.03527302, - 0.15538219, - 0.08349002, - 0.10931568, - 0.04684532, - -0.010147538, - -0.03256112, - 0.12924333, - 0.031221064, - -0.099673584, - 0.010860566, - 0.02326085, - -0.011916549, - 0.010135849, - 0.06884636, - 0.009350001, - -0.0226591, - -0.04280281, - -0.04821317, - -0.08508304, - 0.051028382, - 0.045148462, - -0.03566162, - 0.06547104, - 0.048883036, - 0.03793435, - -0.1407055, - -0.06711337, - 0.009881868, - -0.0049659596, - -0.044289522, - 0.0039236215, - -0.02692826, - -0.066134326, - 0.04076233, - -0.05222117, - 0.060488354, - -0.04113724, - -0.04314174, - -0.025147837, - 0.085597694, - -0.044939328, - 0.06395307, - -0.024218159, - -0.050523587, - -0.0020718095, - -0.07894165, - 0.0026805927, - 0.020709056, - 0.1026727, - -0.012374822, - 0.056179732, - 0.06552235, - 0.030915475, - -0.077197015, - -0.061245024, - -0.016111895, - -1.3512232e-08, - -0.05040501, - -0.033646606, - 0.04670903, - 0.047397695, - -0.044165645, - 0.046301767, - -0.006073457, - -0.053902794, - 0.013089125, - 0.050438043, - -0.009894958, - -0.0041677835, - 0.0723306, - 0.021069802, - 0.02670403, - -0.074845195, - -0.026750853, - 0.052738186, - -0.03469103, - 0.039813705, - -0.01640883, - 0.045899663, - -0.0224731, - 0.02387658, - 0.049145795, - 0.09110705, - -0.0025007618, - 0.04937552, - -0.03864697, - 0.020868128, - 0.07605537, - 0.08488945, - -0.05197299, - -0.06879239, - -0.06136516, - 0.077237174, - -0.06451729, - 0.04453416, - 0.008209786, - 0.015886698, - -0.04280691, - 0.005315579, - 0.0034463098, - 0.0031776188, - -0.013040836, - -0.091359615, - 0.0642767, - -0.054965723, - 0.0007161393, - -0.06260912, - -0.03496602, - -0.029944083, - 0.04422821, - 0.017855663, - -0.027972128, - -0.03656317, - 
0.02111413, - 0.060607255, - -0.031320468, - -0.014338154, - 0.034649797, - 0.052279983, - -0.036579564, - 0.028179456 + 0.043779343, + 0.021533398, + -0.081306435, + 0.010584965, + -0.079082854, + -0.03219143, + 0.13092613, + 0.04234389, + -0.11600539, + -0.07588513, + 0.04182356, + -0.08061255, + 0.038127176, + -0.010701234, + 0.015768763, + -0.04193689, + 0.04310592, + -0.033361685, + 0.013566423, + -0.010392366, + 0.015551022, + -0.037858423, + -0.050305344, + -0.025666261, + -0.047879875, + -0.087179765, + 0.016856788, + -0.036765736, + 0.006393739, + 0.020844297, + 0.11262393, + -0.002143682, + -0.07910913, + 0.038748607, + 0.11532516, + -0.019759571, + 0.0066967797, + -0.021164352, + -0.014471563, + -0.0027048697, + -0.034388524, + -0.052571636, + -0.030607725, + 0.04747725, + -0.02431059, + 0.0109337615, + -0.03946421, + 0.071846664, + -0.020690937, + 0.01898796, + 0.042931512, + -0.0077551426, + 0.0025911122, + -0.058268107, + 0.0117475465, + -0.022701943, + 0.0017815019, + -0.012612941, + 0.030724185, + 0.017728312, + -0.06155491, + -0.03656162, + 0.02583153, + 0.02537894, + 0.012139213, + 0.009105951, + -0.027318193, + -0.093389414, + 0.005184693, + 0.007488449, + -0.07540277, + 0.010159999, + -0.028444426, + 0.030260745, + 0.0036438918, + -0.022627153, + -0.037846327, + -0.08381657, + -0.012445195, + -0.048908208, + 0.029149827, + -0.044437535, + -0.07520237, + -0.020924438, + 0.06342514, + 0.1629199, + 0.060563333, + -0.012817673, + -0.031030292, + 0.018368995, + 0.11223112, + 0.07292473, + -0.062686674, + -0.031803295, + -0.017489262, + 0.048433464, + -0.041148387, + -0.04183779, + -0.05994369, + 0.15909556, + -0.027785666, + -0.012455991, + 0.056005318, + -0.019891974, + 0.022063067, + 0.006342065, + 0.0464118, + -0.07311654, + 0.033282198, + 0.05949105, + -0.033307947, + 0.030738499, + 0.008186239, + -0.020268966, + 0.056593496, + -0.081526734, + 0.023390312, + 0.0060836566, + -0.07992586, + 0.013986445, + 0.052250065, + 0.027186505, + -0.049284942, 
+ 0.028148174, + 0.019493744, + 0.05418436, + 0.0827222, + -1.8825437e-33, + 0.01360945, + -0.010870715, + 0.015887791, + 0.069373555, + -0.051129147, + 0.08999179, + 0.044494778, + 0.08100757, + 0.018944906, + -0.020974122, + -0.017938385, + -0.021756735, + 0.010972489, + 0.015099965, + 0.017018452, + 0.094338946, + 0.0034407445, + 0.010244923, + -0.044709302, + 0.0018059182, + 0.015817573, + -0.065777056, + -0.004948138, + 0.0044092103, + -0.019589791, + -0.092789896, + -0.025898295, + 0.044104066, + 0.0541385, + -0.007362511, + -0.021487307, + -0.036836285, + -0.09148704, + 0.084001675, + -0.018094191, + 0.003797567, + 0.020257449, + 0.04394643, + -0.0772898, + 0.0057312953, + -0.054519102, + -0.024835315, + 0.0753162, + 0.034552757, + -0.081203006, + -0.12210961, + -0.0053012627, + 0.00780717, + 0.050265096, + 0.015569535, + -0.056362487, + 0.039800324, + 0.013022089, + -0.04015537, + 0.014401654, + -0.033209093, + -0.008451782, + -0.037590392, + -0.01965779, + 0.01730637, + -0.00896531, + -0.0018413392, + -0.0030382746, + 0.030460354, + -0.05112036, + -0.086875, + -0.018338922, + -0.11328767, + 0.07325826, + 0.046035297, + 0.012633494, + -0.06343216, + -0.028439038, + 0.020128354, + -0.07883383, + -0.00069870794, + -0.03155447, + 0.12306934, + 0.004300722, + -0.026421167, + 0.078361824, + -0.077461444, + -0.021267027, + 0.048929654, + 0.02919381, + -0.0092880055, + -0.030666346, + -0.04102384, + -0.03860138, + -0.08042292, + 0.023227168, + 0.04191858, + -0.058156747, + 0.0585743, + 0.076342255, + 4.465569e-34, + -0.019599343, + 0.040230304, + 0.01455632, + 0.034345042, + 0.04392999, + -0.023241352, + 0.067749046, + -0.03010354, + -0.09075954, + -0.019227842, + -0.027724287, + -0.00062344945, + 0.0042892746, + 0.053643614, + 0.04075099, + 0.032581333, + -0.107116826, + -0.0500636, + -0.016655827, + -0.007782394, + -0.111523, + 0.07476429, + -0.016019335, + -0.050536986, + -0.11320647, + -0.0061384854, + 0.050886273, + -0.030283457, + 0.04318923, + 0.03301474, + 
0.02362771, + 0.046507858, + -0.03416386, + 0.036145207, + 0.023037339, + -0.026803765, + 0.06361122, + 0.09975251, + 0.035269737, + 0.1554014, + 0.083479255, + 0.10931981, + 0.046847064, + -0.010136355, + -0.032541983, + 0.12926093, + 0.031193413, + -0.09971323, + 0.010830718, + 0.02325219, + -0.011917061, + 0.010155018, + 0.06883269, + 0.009340846, + -0.022698723, + -0.042815465, + -0.048211087, + -0.085067384, + 0.05105234, + 0.045155898, + -0.03564869, + 0.06549556, + 0.048875004, + 0.037915554, + -0.14071068, + -0.067095764, + 0.009898252, + -0.0049653547, + -0.044304688, + 0.0039006064, + -0.026903173, + -0.066124685, + 0.040738244, + -0.052228633, + 0.060485654, + -0.041119356, + -0.04312945, + -0.025152665, + 0.08556276, + -0.044942576, + 0.06393979, + -0.024227533, + -0.05052092, + -0.0020624825, + -0.078943975, + 0.0026753, + 0.02068896, + 0.102683865, + -0.01237572, + 0.056172684, + 0.06552171, + 0.030940128, + -0.07721113, + -0.061241012, + -0.016143149, + -1.3511957e-08, + -0.050416306, + -0.033628013, + 0.046722032, + 0.04744138, + -0.04411888, + 0.04631675, + -0.0060847937, + -0.053873356, + 0.013075445, + 0.050437532, + -0.009895477, + -0.0041795173, + 0.07229928, + 0.021081135, + 0.02672776, + -0.07482113, + -0.026757998, + 0.052755926, + -0.034690056, + 0.039811596, + -0.016370349, + 0.045900222, + -0.02250936, + 0.023861, + 0.04912799, + 0.09111738, + -0.0024878879, + 0.049395334, + -0.03861115, + 0.020867983, + 0.076049894, + 0.084881924, + -0.051956687, + -0.06878504, + -0.061384037, + 0.077220954, + -0.06454818, + 0.044513144, + 0.008181126, + 0.015890416, + -0.04280811, + 0.005317184, + 0.0034429359, + 0.0031937633, + -0.013058055, + -0.09134677, + 0.06425565, + -0.054977305, + 0.0007087448, + -0.06258866, + -0.034974415, + -0.029966963, + 0.044276785, + 0.017868131, + -0.027976807, + -0.036579583, + 0.021142753, + 0.06057356, + -0.03133335, + -0.014331035, + 0.034653842, + 0.052315667, + -0.036585484, + 0.028209662 ], "index": 0, "object": 
"embedding" diff --git a/tests/integration/recordings/responses/6412295819a1.json b/tests/integration/recordings/responses/6412295819a1.json index 728380b02..2333176ea 100644 --- a/tests/integration/recordings/responses/6412295819a1.json +++ b/tests/integration/recordings/responses/6412295819a1.json @@ -16,23 +16,23 @@ "body": { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-104", + "id": "cmpl-865", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, - "text": "blue.\n\nI completed the sentence with \"blue\" because it is a common completion used to complete the traditional nursery rhyme, which ends with:\n\nRoses are red,\nViolets are blue.\n\nThe complete rhyme is often remembered and recited as follows:\n\nRoses are red,\nViolets are blue,\nSugar is sweet,\nAnd so are you!" + "text": "Blue.\n\nMy answer is \"blue\" because it's a classic completion of the traditional nursery rhyme poem:\n\n\"Roses are red, violets are blue\"\n\nThis sentiment suggests that an unseen suitor from the first half of the line has given or will give the speaker roses." 
} ], - "created": 1757857132, + "created": 1759441353, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 72, + "completion_tokens": 58, "prompt_tokens": 50, - "total_tokens": 122, + "total_tokens": 108, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/6540a315ea8e.json b/tests/integration/recordings/responses/6540a315ea8e.json new file mode 100644 index 000000000..68b7c0a21 --- /dev/null +++ b/tests/integration/recordings/responses/6540a315ea8e.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-545", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_d1i5ou69", + 
"function": { + "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441675, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-545", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759441675, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/65c12de0a1db.json b/tests/integration/recordings/responses/65c12de0a1db.json index e1c0fb8fc..31f88271d 100644 --- a/tests/integration/recordings/responses/65c12de0a1db.json +++ b/tests/integration/recordings/responses/65c12de0a1db.json @@ -24,14 +24,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-123", + "id": "chatcmpl-528", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "Hello! As of my knowledge cutoff on December 15th, I have the latest information for you. However, please note that my data may not be entirely up-to-date.\n\nCurrently, and based on historical climate patterns, it appears to be a partly cloudy day with mild temperatures in San Francisco, CA. Expect a temperature range of around 48\u00b0F (9\u00b0C) to 54\u00b0F (12\u00b0C). 
It's likely to be a breezy day, with winds blowing at about 13 mph (21 km/h).\n\nHowever, if I were to look into more recent weather patterns or forecasts, I would recommend checking the latest conditions directly from reliable sources such as the National Weather Service or local news outlets for more accurate and up-to-date information.\n\nPlease let me know how I can further assist you.", + "content": "I can give you a general idea of the typical weather conditions in San Francisco during this time.\n\nUnfortunately, I'm not aware of your current location or date. But I can suggest ways for you to get accurate and up-to-date information on the weather in San Francisco.\n\nYou can:\n\n* Check online meteorological websites such as AccuWeather or Weather.com for current conditions and forecasts.\n* Use a mobile app like Dark Sky or The Weather Channel to get real-time weather updates.\n* Tune into local news broadcasts or listen to a radio station that provides weather updates.\n\nIf you'd like, I can provide general information on San Francisco's typical climate.", "refusal": null, "role": "assistant", "annotations": null, @@ -41,15 +41,15 @@ } } ], - "created": 1758978071, + "created": 1759376616, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 163, + "completion_tokens": 131, "prompt_tokens": 45, - "total_tokens": 208, + "total_tokens": 176, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/67f94c4f8ba0.json b/tests/integration/recordings/responses/67f94c4f8ba0.json index cd8ad4f35..f4b36af9a 100644 --- a/tests/integration/recordings/responses/67f94c4f8ba0.json +++ b/tests/integration/recordings/responses/67f94c4f8ba0.json @@ -28,7 +28,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", 
"choices": [ { "delta": { @@ -43,7 +43,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -54,7 +54,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -69,7 +69,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -80,7 +80,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -95,7 +95,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -106,7 +106,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -121,7 +121,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -132,7 +132,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -147,7 +147,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -158,7 +158,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -173,7 +173,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": 
"llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -184,7 +184,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -199,7 +199,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -210,7 +210,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -225,7 +225,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -236,7 +236,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -251,7 +251,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -262,7 +262,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -277,7 +277,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -288,7 +288,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -303,7 +303,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -314,7 +314,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -329,7 +329,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -340,7 +340,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -355,7 +355,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -366,7 +366,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -381,7 +381,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -392,7 +392,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -407,7 +407,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -418,7 +418,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -433,7 +433,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -444,7 +444,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { 
"delta": { @@ -459,7 +459,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -470,7 +470,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -485,7 +485,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -496,7 +496,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -511,7 +511,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -522,7 +522,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -537,7 +537,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441668, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -548,7 +548,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -563,7 +563,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -574,7 +574,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -589,7 +589,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": 
"llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -600,7 +600,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -615,7 +615,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -626,7 +626,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -641,7 +641,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -652,7 +652,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -667,7 +667,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -678,7 +678,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -693,7 +693,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -704,7 +704,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -719,7 +719,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -730,7 +730,7 @@ { "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -745,7 +745,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -756,7 +756,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -771,7 +771,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -782,7 +782,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -797,7 +797,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -808,7 +808,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -823,7 +823,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -834,7 +834,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -849,7 +849,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -860,7 +860,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { 
"delta": { @@ -875,7 +875,7 @@ "logprobs": null } ], - "created": 1759427020, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -886,7 +886,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -901,7 +901,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -912,7 +912,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -927,7 +927,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -938,7 +938,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -953,7 +953,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -964,7 +964,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -979,7 +979,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -990,7 +990,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1005,7 +1005,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": 
"llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1016,7 +1016,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1031,7 +1031,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1042,7 +1042,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1057,7 +1057,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1068,7 +1068,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1083,7 +1083,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1094,7 +1094,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1109,7 +1109,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1120,7 +1120,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1135,7 +1135,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1146,7 +1146,7 @@ { 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1161,7 +1161,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1172,7 +1172,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1187,7 +1187,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441669, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1198,7 +1198,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1213,7 +1213,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1224,7 +1224,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1239,7 +1239,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1250,7 +1250,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1265,7 +1265,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1276,7 +1276,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + 
"id": "chatcmpl-681", "choices": [ { "delta": { @@ -1291,7 +1291,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1302,7 +1302,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1317,7 +1317,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1328,7 +1328,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1343,7 +1343,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1354,7 +1354,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1369,7 +1369,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1380,7 +1380,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1395,7 +1395,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1406,7 +1406,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1421,7 +1421,7 @@ "logprobs": null } ], - "created": 1759427021, + 
"created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1432,7 +1432,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1447,7 +1447,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1458,7 +1458,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1473,7 +1473,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1484,7 +1484,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-932", + "id": "chatcmpl-681", "choices": [ { "delta": { @@ -1499,7 +1499,7 @@ "logprobs": null } ], - "created": 1759427021, + "created": 1759441670, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/6b3e593ad9b8.json b/tests/integration/recordings/responses/6b3e593ad9b8.json index e5a85eb3d..ccb1d0101 100644 --- a/tests/integration/recordings/responses/6b3e593ad9b8.json +++ b/tests/integration/recordings/responses/6b3e593ad9b8.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-819", + "id": "chatcmpl-642", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282466, + "created": 1759441159, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/6f90277933e2.json 
b/tests/integration/recordings/responses/6f90277933e2.json new file mode 100644 index 000000000..f1d08a5c6 --- /dev/null +++ b/tests/integration/recordings/responses/6f90277933e2.json @@ -0,0 +1,419 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_qv279qx8", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_qv279qx8", + "content": "-100" + } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + 
"content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428002, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428003, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428003, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759428003, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-790", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759428003, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/6f96090aa955.json b/tests/integration/recordings/responses/6f96090aa955.json index d0ac20442..67628bf51 100644 --- a/tests/integration/recordings/responses/6f96090aa955.json +++ b/tests/integration/recordings/responses/6f96090aa955.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1756921359, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - "created": 1756921359, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,11 +73,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { - "content": " It", + "content": " How", "function_call": null, "refusal": null, "role": "assistant", @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1756921359, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": 
"chat.completion.chunk", "service_tier": null, @@ -99,267 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": "'s", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " nice", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " meet", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": 
"llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " Is", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " there", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - 
"finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " something", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " I", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { @@ -374,7 +114,7 @@ "logprobs": null } ], - "created": 1756921359, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -385,11 +125,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { - "content": " help", + "content": " I", "function_call": null, "refusal": null, "role": "assistant", @@ -400,7 +140,7 @@ "logprobs": null } ], - 
"created": 1756921359, + "created": 1759437881, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -411,7 +151,33 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", + "choices": [ + { + "delta": { + "content": " assist", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437881, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-456", "choices": [ { "delta": { @@ -426,7 +192,7 @@ "logprobs": null } ], - "created": 1756921359, + "created": 1759437881, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -437,11 +203,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { - "content": " with", + "content": " today", "function_call": null, "refusal": null, "role": "assistant", @@ -452,7 +218,7 @@ "logprobs": null } ], - "created": 1756921359, + "created": 1759437881, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -463,163 +229,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " or", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - 
"system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " would", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921359, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " you", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921360, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " like", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921360, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921360, - "model": 
"llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", - "choices": [ - { - "delta": { - "content": " chat", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921360, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { @@ -634,7 +244,7 @@ "logprobs": null } ], - "created": 1756921360, + "created": 1759437881, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -645,7 +255,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-698", + "id": "chatcmpl-456", "choices": [ { "delta": { @@ -660,7 +270,7 @@ "logprobs": null } ], - "created": 1756921360, + "created": 1759437881, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/71c9c6746a31.json b/tests/integration/recordings/responses/71c9c6746a31.json new file mode 100644 index 000000000..132606068 --- /dev/null +++ b/tests/integration/recordings/responses/71c9c6746a31.json @@ -0,0 +1,809 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the 
liquid polyjuice in celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_pm9dfvfk", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_pm9dfvfk", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " was", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + 
"finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " unable", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " find", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " the", + 
"function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + 
"__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " liquid", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + 
} + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " Celsius", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437832, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " could", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " not", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + 
"logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " be", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " located", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " my", + "function_call": null, + "refusal": null, 
+ "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": " database", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-495", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437833, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/771131fb4c46.json 
b/tests/integration/recordings/responses/771131fb4c46.json index e3501541e..0a1447690 100644 --- a/tests/integration/recordings/responses/771131fb4c46.json +++ b/tests/integration/recordings/responses/771131fb4c46.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-220", + "id": "chatcmpl-55", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245122, + "created": 1759437798, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/7a047bcf8b19.json b/tests/integration/recordings/responses/7a047bcf8b19.json index 7cd6c3f7c..73b948a10 100644 --- a/tests/integration/recordings/responses/7a047bcf8b19.json +++ b/tests/integration/recordings/responses/7a047bcf8b19.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-737", + "id": "chatcmpl-652", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282582, + "created": 1759441673, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/7c57049fc13f.json b/tests/integration/recordings/responses/7c57049fc13f.json new file mode 100644 index 000000000..08c1c20d2 --- /dev/null +++ b/tests/integration/recordings/responses/7c57049fc13f.json @@ -0,0 +1,57 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama-guard3:1b", + "messages": [ + { + "role": "user", + "content": "Task: Check if there is unsafe content in 'Tool' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: 
Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: Get the boiling point of polyjuice with a tool call.\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\n\n\nProvide your safety assessment for ONLY THE LAST Tool message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories." + } + ], + "stream": false, + "temperature": 0.0 + }, + "endpoint": "/v1/chat/completions", + "model": "llama-guard3:1b" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-906", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "safe", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1759437819, + "model": "llama-guard3:1b", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 2, + "prompt_tokens": 418, + "total_tokens": 420, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/7d089a973e08.json b/tests/integration/recordings/responses/7d089a973e08.json new file mode 100644 index 000000000..93157f645 --- /dev/null +++ b/tests/integration/recordings/responses/7d089a973e08.json @@ -0,0 +1,804 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in 
celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_kg9401ss", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_kg9401ss", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437814, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " was", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437814, + "model": 
"llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " unable", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437814, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437814, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " find", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437814, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + 
"finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " 
liquid", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " Celsius", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " could", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " not", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " be", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " located", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " my", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": " database", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-212", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437815, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/7e4bdf20925c.json 
b/tests/integration/recordings/responses/7e4bdf20925c.json new file mode 100644 index 000000000..4e76fdd3c --- /dev/null +++ b/tests/integration/recordings/responses/7e4bdf20925c.json @@ -0,0 +1,124 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-366", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_k3oc5cxw", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441672, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": 
"fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-366", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759441672, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/7fc8b6ca483d.json b/tests/integration/recordings/responses/7fc8b6ca483d.json new file mode 100644 index 000000000..5e6d7f57b --- /dev/null +++ b/tests/integration/recordings/responses/7fc8b6ca483d.json @@ -0,0 +1,57 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama-guard3:1b", + "messages": [ + { + "role": "user", + "content": "Task: Check if there is unsafe content in 'Assistant' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: Get the boiling point of polyjuice with a tool call.\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\nAssistant: I apologize for the error. 
Here is the revised tool call:\n\n{\"name\": \"get_boiling_point\", \"parameters\": {\"liquid_name\": \"polyjuice\"}}\n\n\n\nProvide your safety assessment for ONLY THE LAST Assistant message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories." + } + ], + "stream": false, + "temperature": 0.0 + }, + "endpoint": "/v1/chat/completions", + "model": "llama-guard3:1b" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-8", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "unsafe\nS9", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1759437821, + "model": "llama-guard3:1b", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 5, + "prompt_tokens": 455, + "total_tokens": 460, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/80311f244b55.json b/tests/integration/recordings/responses/80311f244b55.json index 707befc07..a5818368c 100644 --- a/tests/integration/recordings/responses/80311f244b55.json +++ b/tests/integration/recordings/responses/80311f244b55.json @@ -22,1170 +22,1170 @@ "data": [ { "embedding": [ - -0.038157914, - 0.03290493, - -0.0055371798, - 0.014353213, - -0.040209096, - -0.11667767, - 0.03170551, - 0.0019347348, - -0.04254092, - 0.029190615, - 0.042559944, - 0.032130145, - 0.02983921, - 0.010979105, - -0.053759154, - -0.05030495, - -0.023470305, - 0.010730486, - -0.1377361, - 0.0039985846, - 0.029267203, - 0.066698566, - -0.015405643, - 0.04843479, - -0.0881545, - -0.012694429, - 0.041265942, - 0.04089442, - -0.05000745, 
- -0.05805947, - 0.048748765, - 0.06891688, - 0.058812816, - 0.008785837, - -0.016080279, - 0.08517403, - -0.07814158, - -0.077435054, - 0.020808736, - 0.016186161, - 0.032549612, - -0.05344129, - -0.062166847, - -0.0242584, - 0.007393759, - 0.024064584, - 0.0064619263, - 0.051204458, - 0.072843835, - 0.034658417, - -0.05477693, - -0.05941287, - -0.007262739, - 0.020149412, - 0.035835978, - 0.0056162532, - 0.010803632, - -0.052724347, - 0.010110615, - -0.0087345, - -0.06285489, - 0.038390912, - -0.013975588, - 0.0734118, - 0.090072334, - -0.07995426, - -0.016420014, - 0.044813525, - -0.06888206, - -0.033037275, - -0.015467736, - 0.01130628, - 0.036483694, - 0.0663459, - -0.054344203, - 0.008723171, - 0.012078509, - -0.038129516, - 0.006938081, - 0.051155496, - 0.07745829, - -0.122897476, - 0.01635594, - 0.04956378, - 0.031677794, - -0.03963372, - 0.0016560612, - 0.0095810415, - -0.032620687, - -0.03396473, - -0.13327733, - 0.0072318353, - -0.010225149, - 0.038535405, - -0.09343492, - -0.04173385, - 0.06996305, - -0.026312327, - -0.14973918, - 0.13443227, - 0.03750676, - 0.052842483, - 0.045053005, - 0.018721534, - 0.05443072, - 0.017290117, - -0.03255681, - 0.046160772, - -0.046711024, - -0.030576464, - -0.018258592, - -0.048711784, - 0.033041865, - -0.003856249, - 0.05003307, - -0.05821012, - -0.00994153, - 0.0106995255, - -0.04008794, - -0.0015539092, - 0.060838487, - -0.04559896, - 0.04924722, - 0.026119638, - 0.019796783, - -0.0016312932, - 0.05955464, - -6.527786e-33, - 0.063555494, - 0.003072545, - 0.0290068, - 0.17338625, - 0.0029474646, - 0.027745575, - -0.095103905, - -0.031165987, - 0.026719859, - -0.010799976, - 0.023851028, - 0.02375357, - -0.031152952, - 0.049497593, - -0.025005657, - 0.10176666, - -0.079190366, - -0.0032479328, - 0.042849813, - 0.09489888, - -0.066508934, - 0.00632239, - 0.022188535, - 0.06996212, - -0.007491268, - -0.001777037, - 0.027047161, - -0.07536194, - 0.11401931, - 0.008564227, - -0.02371391, - -0.046974454, - 0.0144310715, - 
0.019899534, - -0.0046927175, - 0.0013119543, - -0.03432107, - -0.054212432, - -0.09418897, - -0.028963951, - -0.018907014, - 0.045735538, - 0.04757043, - -0.003132595, - -0.033231355, - -0.013520351, - 0.051010653, - 0.03111525, - 0.015257217, - 0.054166727, - -0.085080594, - 0.013355202, - -0.04763934, - 0.07099156, - -0.01309272, - -0.0023823304, - 0.050339438, - -0.041624993, - -0.014171974, - 0.032421313, - 0.005414455, - 0.09128853, - 0.0045168963, - -0.018196244, - -0.015225792, - -0.04635148, - 0.038764603, - 0.014739169, - 0.052030377, - 0.0017809072, - -0.014930553, - 0.027100598, - 0.031190928, - 0.02379928, - -0.0045879, - 0.03622444, - 0.066800386, - -0.0018508516, - 0.021243243, - -0.0575494, - 0.019077979, - 0.031474162, - -0.018456634, - -0.04083116, - 0.10387791, - 0.011981423, - -0.014923204, - -0.10519511, - -0.012293124, - -0.00042049217, - -0.09506704, - 0.058275525, - 0.042611193, - -0.025061507, - -0.094545335, - 4.010606e-33, - 0.13226718, - 0.0053517097, - -0.03314567, - -0.09099676, - -0.031551942, - -0.033939674, - -0.071981214, - 0.12595285, - -0.08333936, - 0.052855294, - 0.001036374, - 0.021973396, - 0.104020424, - 0.013031712, - 0.040921222, - 0.018695012, - 0.114233166, - 0.024822846, - 0.014595918, - 0.00621894, - -0.011220824, - -0.035742316, - -0.03801776, - 0.011226576, - -0.051305167, - 0.007892534, - 0.06734842, - 0.0033567564, - -0.09286571, - 0.03701943, - -0.022331072, - 0.040051647, - -0.030764744, - -0.011390678, - -0.014426033, - 0.024999708, - -0.09751172, - -0.03538673, - -0.03757043, - -0.010174254, - -0.06396341, - 0.025548752, - 0.020661479, - 0.03752242, - -0.10438308, - -0.028266912, - -0.052153755, - 0.012830027, - -0.05125152, - -0.029009243, - -0.09633578, - -0.042322997, - 0.06716196, - -0.030903742, - -0.010314011, - 0.027343867, - -0.028119028, - 0.010296558, - 0.043072425, - 0.022286164, - 0.007943, - 0.056093868, - 0.040728126, - 0.09295372, - 0.016456816, - -0.053744446, - 0.00047035623, - 0.050744157, - 
0.04246857, - -0.029237023, - 0.009294763, - -0.010624897, - -0.037202932, - 0.00220195, - -0.030278567, - 0.07457478, - 0.0026277148, - -0.017591486, - 0.0028708735, - 0.03840644, - 0.0072204536, - 0.045653794, - 0.039947055, - 0.014161398, - -0.014247232, - 0.058465447, - 0.036360227, - 0.055268615, - -0.02004829, - -0.08043532, - -0.030213723, - -0.0148566915, - 0.022293866, - 0.011908896, - -0.06907556, - -1.8805048e-08, - -0.078408636, - 0.046699222, - -0.023894435, - 0.06347232, - 0.02395583, - 0.0014103559, - -0.090737104, - -0.06684135, - -0.080118775, - 0.0054891296, - 0.05368204, - 0.10478211, - -0.066875115, - 0.015525915, - 0.06710851, - 0.07083251, - -0.03199485, - 0.020825442, - -0.021920865, - -0.0072890157, - -0.01058703, - 0.004174248, - 0.033155944, - -0.07901077, - 0.038750935, - -0.07521113, - -0.015731987, - 0.005987591, - 0.0051212795, - -0.061557226, - 0.04203319, - 0.09544439, - -0.04317485, - 0.014446859, - -0.10614051, - -0.028011814, - 0.01101727, - 0.069552526, - 0.0669063, - -0.0747214, - -0.078444764, - 0.042728573, - -0.034634914, - -0.106056124, - -0.0357495, - 0.05155015, - 0.068699375, - -0.049968246, - 0.015420614, - -0.06460179, - -0.07601102, - 0.026022797, - 0.07440251, - -0.0124161495, - 0.1332999, - 0.07480527, - 0.051343314, - 0.02094546, - -0.026808253, - 0.08892536, - 0.03996125, - -0.041000355, - 0.03187991, - 0.018108707 + -0.038168654, + 0.032873917, + -0.0055947267, + 0.014366432, + -0.040310103, + -0.116643615, + 0.031721067, + 0.0019260457, + -0.04255802, + 0.029198613, + 0.04252229, + 0.032184314, + 0.029838374, + 0.010959321, + -0.053805783, + -0.05028783, + -0.023449864, + 0.0107550435, + -0.13774979, + 0.0039929547, + 0.029302042, + 0.066712305, + -0.015410682, + 0.048422653, + -0.08814465, + -0.012715775, + 0.041334823, + 0.040851083, + -0.050064698, + -0.05804616, + 0.048728727, + 0.06888658, + 0.058795262, + 0.008804153, + -0.016073612, + 0.08514259, + -0.078146815, + -0.07741974, + 0.020842256, + 0.016201088, 
+ 0.032518543, + -0.05346469, + -0.062197812, + -0.024271712, + 0.007416788, + 0.024103774, + 0.006469804, + 0.051166162, + 0.07284196, + 0.034627657, + -0.05475476, + -0.059386417, + -0.0071934434, + 0.020163197, + 0.035816014, + 0.0055927313, + 0.010762318, + -0.05274177, + 0.010083032, + -0.008742163, + -0.06284565, + 0.038426206, + -0.013933317, + 0.07342759, + 0.09004579, + -0.07995627, + -0.016420787, + 0.044767782, + -0.06886435, + -0.03303916, + -0.015482072, + 0.011322529, + 0.036461752, + 0.066346884, + -0.05434455, + 0.008740993, + 0.012066104, + -0.038101126, + 0.0069316486, + 0.051146947, + 0.07740579, + -0.122950904, + 0.016380342, + 0.049568996, + 0.031634904, + -0.039637603, + 0.0016715266, + 0.009577405, + -0.032646418, + -0.033988595, + -0.13329837, + 0.0072566303, + -0.010266605, + 0.038557075, + -0.09338859, + -0.041706774, + 0.069941126, + -0.026323376, + -0.14971305, + 0.13445398, + 0.03748492, + 0.052825302, + 0.0450506, + 0.018712776, + 0.05444322, + 0.017282845, + -0.032480195, + 0.04614526, + -0.046711974, + -0.030566413, + -0.01820007, + -0.04869831, + 0.033051647, + -0.0038142777, + 0.04999665, + -0.058270358, + -0.010011706, + 0.010643473, + -0.040113144, + -0.0015507729, + 0.060854245, + -0.045562096, + 0.049257778, + 0.02612153, + 0.01981428, + -0.001660993, + 0.059509434, + -6.525298e-33, + 0.063519135, + 0.0030875143, + 0.028961418, + 0.1733713, + 0.0029763067, + 0.027727291, + -0.0951315, + -0.031186627, + 0.026689058, + -0.010807322, + 0.023850724, + 0.023777472, + -0.031174092, + 0.049501278, + -0.025049716, + 0.10175924, + -0.07919064, + -0.0032249284, + 0.042915843, + 0.09483459, + -0.06652636, + 0.006303593, + 0.02220902, + 0.06999181, + -0.0074810013, + -0.0017734945, + 0.027008688, + -0.07534615, + 0.114036545, + 0.008552313, + -0.023737878, + -0.04694563, + 0.014472103, + 0.019855395, + -0.0046694353, + 0.0013555645, + -0.034298304, + -0.054142635, + -0.09419824, + -0.028909719, + -0.018876282, + 0.0457315, + 0.04761082, + 
-0.0030971593, + -0.033264168, + -0.013539523, + 0.051041685, + 0.031110944, + 0.015244497, + 0.054158635, + -0.08499706, + 0.013360703, + -0.04759633, + 0.07101136, + -0.0131114535, + -0.0023818254, + 0.050331973, + -0.041642286, + -0.01419894, + 0.032463223, + 0.0053973934, + 0.091275506, + 0.0044798073, + -0.018260129, + -0.015278888, + -0.046306957, + 0.038750377, + 0.014729783, + 0.05204642, + 0.0017938613, + -0.014963651, + 0.027101943, + 0.031203475, + 0.023725478, + -0.004601222, + 0.03617344, + 0.06679477, + -0.0018401983, + 0.021265576, + -0.057589985, + 0.019155758, + 0.031437635, + -0.018444614, + -0.04085069, + 0.10393101, + 0.011960795, + -0.014898805, + -0.10520497, + -0.012302656, + -0.00043837292, + -0.09508398, + 0.058318105, + 0.042576887, + -0.025066672, + -0.094555676, + 4.0072287e-33, + 0.1322281, + 0.0053512393, + -0.03312536, + -0.09096454, + -0.031562407, + -0.033949774, + -0.07205118, + 0.1259232, + -0.08333555, + 0.052797858, + 0.001077506, + 0.022004265, + 0.10402767, + 0.013034249, + 0.04091762, + 0.018705815, + 0.11424037, + 0.024799824, + 0.014582492, + 0.006205516, + -0.011202356, + -0.035756435, + -0.03800272, + 0.011251353, + -0.0512988, + 0.007890417, + 0.06736164, + 0.0033359542, + -0.09285096, + 0.03704081, + -0.022326592, + 0.039967872, + -0.030748183, + -0.011446819, + -0.014453254, + 0.02498229, + -0.097532175, + -0.035378877, + -0.03757795, + -0.010181498, + -0.06392041, + 0.025538994, + 0.02061816, + 0.03757256, + -0.1043548, + -0.028326731, + -0.05209465, + 0.0128473425, + -0.051238894, + -0.029034877, + -0.09633617, + -0.042309195, + 0.067165054, + -0.030870603, + -0.010357507, + 0.027381465, + -0.028105576, + 0.010302046, + 0.04306986, + 0.022315372, + 0.007954779, + 0.056068663, + 0.04071972, + 0.09293905, + 0.016536433, + -0.053764775, + 0.00047211433, + 0.050708972, + 0.042510226, + -0.029195962, + 0.009274875, + -0.010647389, + -0.037209682, + 0.002267011, + -0.030304702, + 0.0745741, + 0.0026207205, + -0.017582772, 
+ 0.0028797672, + 0.038404796, + 0.00723137, + 0.045613218, + 0.03998252, + 0.014209623, + -0.0142997475, + 0.05850862, + 0.03630791, + 0.055294298, + -0.020075988, + -0.08041808, + -0.030250112, + -0.014920701, + 0.022349516, + 0.011911506, + -0.06903851, + -1.8806734e-08, + -0.078480355, + 0.046674173, + -0.023920896, + 0.0634942, + 0.02396477, + 0.0014517035, + -0.090798445, + -0.06684978, + -0.0801405, + 0.005503192, + 0.053675175, + 0.104841895, + -0.066848256, + 0.015522683, + 0.067097165, + 0.070832625, + -0.03197915, + 0.020843629, + -0.0219202, + -0.0073016756, + -0.010645817, + 0.0040983153, + 0.03313765, + -0.0790081, + 0.03878132, + -0.075230986, + -0.015732396, + 0.0060099233, + 0.0051297406, + -0.061492138, + 0.04202211, + 0.09544608, + -0.04318599, + 0.014424486, + -0.10617826, + -0.027963417, + 0.011034413, + 0.069576606, + 0.06689785, + -0.07479674, + -0.07851099, + 0.042766396, + -0.034639932, + -0.10607304, + -0.03577663, + 0.051540814, + 0.068673156, + -0.049959548, + 0.015460458, + -0.064520314, + -0.076010585, + 0.026035817, + 0.07440218, + -0.012396022, + 0.13329679, + 0.074770845, + 0.05134284, + 0.020977058, + -0.026776016, + 0.08894323, + 0.039937407, + -0.04102053, + 0.03194075, + 0.018113315 ], "index": 0, "object": "embedding" }, { "embedding": [ - -0.009823841, - 0.06685394, - 0.08489411, - 0.03813849, - 0.032225974, - -0.034307797, - 0.107310556, - -0.046902046, - -0.102643676, - -0.003702005, - -0.0023676767, - 0.012173647, - -0.046961293, - 0.08201565, - 0.04295503, - -0.027037757, - 0.0070437216, - -0.104356326, - -0.12175826, - 0.07269557, - -0.079771765, - -0.003676955, - -0.0044014333, - 0.06784145, - -0.020959238, - 0.05777534, - -0.008483368, - -0.013391308, - 0.0052807773, - -0.09834358, - -0.13073047, - 0.008964234, - -0.057907283, - -0.05804121, - -0.05626149, - -0.042638198, - 3.184936e-05, - -0.14460282, - 0.007979306, - 0.022538451, - 0.048148528, - -0.039077234, - -0.012783144, - 0.007688736, - 0.05792521, - 
-0.027782526, - -0.019818667, - 0.09386619, - 0.14314687, - -0.023420751, - -0.10621568, - 0.026846798, - -0.05543366, - 0.017867815, - 0.021250507, - 0.041602414, - 0.0033089865, - 0.016080648, - 0.083043434, - -0.014604297, - 0.027198244, - 0.014271484, - -0.0062427525, - 0.06058171, - 0.03864093, - 0.0060196337, - -0.10089876, - -0.05285287, - -0.0797282, - 0.01671729, - -0.054698065, - -0.073024616, - 0.04547561, - -0.009560945, - -0.010386015, - -0.064177126, - 0.0011365172, - -0.036887243, - 0.06302413, - -0.0016032788, - 0.057869848, - -0.026043506, - -0.000536635, - 0.021403369, - -0.05001242, - -0.011384805, - -0.008799393, - 0.09338713, - 0.010654576, - -0.0006147975, - -0.056140404, - 0.043459535, - 0.0037720772, - 0.027983129, - 0.020964785, - -0.038642954, - 0.019421708, - 0.023177834, - -0.051029585, - 0.13815063, - 0.022802453, - 0.13100733, - 0.042305406, - 0.012445653, - 0.022351589, - 0.014143133, - -0.09037672, - 0.07454903, - -0.062642604, - -0.08922512, - 0.005484734, - 0.03850994, - -0.03628572, - -0.009195987, - 0.09181748, - -0.012547894, - 0.026162561, - 0.08752062, - -0.010926715, - 0.09250321, - 0.02097545, - 0.052515954, - 0.028899532, - 0.039395254, - -0.010501714, - 0.077294946, - 0.0715375, - -7.66496e-33, - 0.100804806, - 0.00073826336, - 0.057312902, - 0.117006026, - -0.060187068, - -0.02796235, - -0.041741833, - -0.018912861, - 0.050848745, - -0.06301131, - 0.036858555, - -0.045183055, - -0.005223951, - 0.0064753974, - -0.03198189, - 0.028979877, - -0.09603434, - 0.057345662, - 0.008110953, - 0.12529288, - -0.021994175, - -0.047584984, - -0.04379391, - 0.021993084, - 0.051113907, - -0.014501653, - -0.021036316, - -0.0667254, - -0.026064333, - -0.008694687, - -0.036617454, - -0.008719971, - 0.115688674, - -0.00289865, - 0.025261829, - -0.0076816385, - -0.008632856, - -0.0036519386, - -0.04257167, - -0.037688565, - 0.03307097, - -0.024961809, - 0.05859159, - -0.06178797, - -0.04673158, - -0.027886666, - -0.035025608, - 0.055327583, - 
-0.002065147, - -0.022386257, - -0.10152246, - 0.029717246, - -0.06324088, - -0.0055829133, - -0.048448645, - -0.04066708, - -0.07524254, - 0.03743904, - 0.016060878, - 0.084327556, - 0.012047858, - 0.055406, - 0.009235782, - -0.07829579, - -0.105074205, - -0.023971796, - -0.017086953, - -0.018263351, - 0.041692156, - -0.00606311, - 0.012483653, - -0.035019528, - 0.024491172, - 0.06318314, - 0.065662295, - 0.052476574, - 0.038394902, - -0.07514326, - -0.012202919, - -0.0064696297, - 0.049809776, - 0.05707129, - -0.0019637872, - -0.049091708, - 0.054853234, - 0.052796733, - 0.007638584, - -0.009890581, - 0.0022318119, - 0.022781821, - -0.06865972, - 0.06054869, - 0.070527636, - -0.04190614, - -0.024943016, - 5.210683e-33, - 0.09748425, - 0.015037715, - -0.0950651, - 0.05163348, - -0.09946082, - -0.046801973, - -0.045799557, - 0.04598005, - -0.021040877, - 0.048971444, - 0.085892275, - 0.031846974, - 0.010494827, - -0.011657944, - 0.023827314, - -0.0036091327, - 0.05379242, - 0.0051917112, - -0.020764181, - 0.011931169, - -0.09782392, - 0.06021868, - -0.027618488, - 0.06742346, - 4.5418237e-05, - 0.06255733, - 0.024763351, - 0.05360233, - -0.037187718, - -0.015447758, - -0.015347547, - -0.021288762, - -0.03981676, - 0.04994158, - 0.019988623, - 0.058448106, - 0.0017628162, - -0.074512705, - -0.015785523, - -0.10013551, - -0.10497206, - 0.030029353, - 0.00386666, - 0.065692, - 0.053144414, - 0.009848025, - -0.023745444, - -0.02572956, - -0.0091416575, - 0.06447014, - 0.008398887, - -0.03277235, - -0.0017416656, - 0.017433915, - 0.02735147, - -0.003945162, - -0.07797209, - -0.061111048, - -0.018393502, - 0.019164208, - -0.10231785, - 0.0048785545, - -0.039205246, - -0.00983978, - 0.024287809, - -0.02257733, - -0.016971176, - -0.03401973, - -0.052132465, - -0.031842116, - -0.034754753, - 0.0082540605, - 0.0013724067, - -0.06360571, - -0.028295932, - 0.050363123, - 0.023888446, - 0.005894443, - -0.0116009535, - -0.0004876411, - -0.07163071, - 0.041449234, - 0.05440186, - 
-0.10820097, - -0.081358775, - -0.069281794, - 0.08610945, - -0.0035109764, - 0.031017194, - 0.08359787, - -0.028458066, - 0.008852798, - -0.027919184, - 0.04985712, - 0.011562651, - -1.5342355e-08, - 0.054318756, - 0.045345105, - -0.07638805, - 0.052091047, - -0.01236827, - 0.060296044, - -0.004145201, - -0.017390434, - -0.014107871, - -0.01709858, - 0.075827934, - 0.007903074, - -0.06532883, - -0.04752482, - 0.038101584, - -0.050273094, - 0.02193425, - 0.068476826, - -0.037231524, - -0.049334478, - 0.057314597, - 0.008028915, - -0.042897243, - 0.09775371, - 0.05817249, - 0.052902617, - 0.024731442, - 0.03277874, - -0.0062142154, - 0.082389385, - 0.037153333, - 0.108709686, - -0.05776975, - 0.036667187, - -0.018986559, - -0.08550582, - 0.059112605, - -0.045709446, - 0.025215724, - 0.022489667, - -0.007955196, - 0.0031373778, - -0.047831737, - -0.01862743, - 0.048644323, - -0.032836094, - 0.054563984, - -0.037403505, - -0.07471283, - -0.019280152, - 0.0060565346, - 0.04239159, - 0.06738598, - 0.04457912, - 0.03311975, - 0.033673216, - 0.0012720197, - 0.033221062, - -0.04845177, - -0.0056105815, - -0.008513508, - -0.016865257, - -0.07558049, - 0.0035253412 + -0.009833591, + 0.0668779, + 0.08488449, + 0.038122248, + 0.032220595, + -0.03433386, + 0.10730999, + -0.046878964, + -0.10266935, + -0.00370671, + -0.0023427065, + 0.0121665625, + -0.046939347, + 0.08200702, + 0.042902183, + -0.0269985, + 0.0070130927, + -0.10432514, + -0.12179822, + 0.07268025, + -0.07978419, + -0.0036544742, + -0.004423966, + 0.06783815, + -0.020906046, + 0.05779926, + -0.008492945, + -0.013392021, + 0.0052612307, + -0.09833074, + -0.13072163, + 0.0089445235, + -0.05787279, + -0.05804388, + -0.056277692, + -0.04266197, + 0.00011274022, + -0.14460878, + 0.007978511, + 0.022490304, + 0.048143692, + -0.039113734, + -0.012775274, + 0.00774044, + 0.057925634, + -0.0277638, + -0.019801306, + 0.09388109, + 0.14315501, + -0.023440128, + -0.10622172, + 0.026852824, + -0.05544247, + 0.017898263, + 
0.021249173, + 0.041583873, + 0.0032883594, + 0.01606716, + 0.08307148, + -0.014618173, + 0.027187122, + 0.014263773, + -0.006215441, + 0.060580455, + 0.038631216, + 0.00601958, + -0.10086374, + -0.052872147, + -0.07970713, + 0.016736085, + -0.054666266, + -0.07301758, + 0.045461986, + -0.009579665, + -0.010393855, + -0.06414482, + 0.0011229888, + -0.03685241, + 0.06301278, + -0.0016175678, + 0.057848454, + -0.02605763, + -0.0005511475, + 0.021425176, + -0.05001372, + -0.011338819, + -0.008776912, + 0.093425095, + 0.010633341, + -0.00062553474, + -0.056090016, + 0.043499533, + 0.0037617732, + 0.028000852, + 0.020929853, + -0.03870579, + 0.019406682, + 0.023135182, + -0.050996922, + 0.13818857, + 0.022762392, + 0.13101754, + 0.042277776, + 0.012446188, + 0.02232269, + 0.01416872, + -0.09036148, + 0.07457381, + -0.062656924, + -0.08921229, + 0.005476475, + 0.03847988, + -0.036277156, + -0.009225353, + 0.091821924, + -0.012585263, + 0.026147954, + 0.08752217, + -0.010917677, + 0.09249038, + 0.020964727, + 0.052522942, + 0.02889203, + 0.03941557, + -0.010532948, + 0.077333786, + 0.071537115, + -7.666136e-33, + 0.1007941, + 0.0006832776, + 0.057265434, + 0.11700236, + -0.060210142, + -0.027968848, + -0.041750107, + -0.018907221, + 0.050820086, + -0.06298854, + 0.03686846, + -0.04519097, + -0.005230235, + 0.0064626867, + -0.032001205, + 0.029013716, + -0.09601744, + 0.057358947, + 0.008101205, + 0.12529038, + -0.021971641, + -0.04753891, + -0.043775026, + 0.022004716, + 0.051121656, + -0.014482441, + -0.021044016, + -0.06673008, + -0.026052782, + -0.008716248, + -0.03660495, + -0.008708152, + 0.115699895, + -0.0028488566, + 0.025259791, + -0.0076865884, + -0.00857807, + -0.003692314, + -0.0425788, + -0.03768598, + 0.03309143, + -0.024962988, + 0.05863119, + -0.061788555, + -0.04672501, + -0.02788036, + -0.03501338, + 0.05530872, + -0.0020685238, + -0.022395074, + -0.10156128, + 0.029757096, + -0.06324917, + -0.0055847103, + -0.04842867, + -0.0406527, + -0.07527831, + 
0.03743154, + 0.016060246, + 0.084336765, + 0.012059259, + 0.05541269, + 0.009253656, + -0.07830337, + -0.10507807, + -0.023997093, + -0.017076802, + -0.018283347, + 0.04169534, + -0.006048637, + 0.012450259, + -0.03500919, + 0.024494508, + 0.06315759, + 0.06566752, + 0.052477088, + 0.038372934, + -0.07515921, + -0.012239953, + -0.006440479, + 0.049801994, + 0.057076473, + -0.0019500607, + -0.04908919, + 0.05485639, + 0.052818075, + 0.007574656, + -0.009921382, + 0.0022724136, + 0.022785993, + -0.06867227, + 0.060549237, + 0.070556775, + -0.041930214, + -0.02491663, + 5.211892e-33, + 0.09750541, + 0.015079458, + -0.095042065, + 0.0515883, + -0.0994903, + -0.046793085, + -0.04579176, + 0.04599562, + -0.021065598, + 0.04897981, + 0.085892305, + 0.031818043, + 0.010482406, + -0.011647838, + 0.023812337, + -0.0036415062, + 0.053783026, + 0.005232672, + -0.02077592, + 0.011894891, + -0.097780555, + 0.060238954, + -0.027633231, + 0.06742237, + 2.5952173e-05, + 0.06254275, + 0.024719816, + 0.053590305, + -0.037180737, + -0.015468933, + -0.015324857, + -0.021314861, + -0.039786287, + 0.049943436, + 0.019945512, + 0.05842415, + 0.0017712337, + -0.07452784, + -0.015759895, + -0.10015912, + -0.104994535, + 0.03002228, + 0.0038714884, + 0.06567684, + 0.05313137, + 0.009852781, + -0.023740485, + -0.025747454, + -0.009146766, + 0.06444407, + 0.008365104, + -0.032752022, + -0.0017309446, + 0.017398946, + 0.027344245, + -0.0039835107, + -0.07793314, + -0.06111028, + -0.018392045, + 0.019161185, + -0.10229173, + 0.004820445, + -0.03923746, + -0.009809605, + 0.02428856, + -0.02256144, + -0.016944531, + -0.03403803, + -0.05211972, + -0.031824537, + -0.034718003, + 0.008275027, + 0.0013583767, + -0.06358826, + -0.028270705, + 0.050367188, + 0.023883171, + 0.0058828085, + -0.011626739, + -0.00044805612, + -0.071661964, + 0.041463517, + 0.054404654, + -0.10819901, + -0.08137075, + -0.06927182, + 0.08611682, + -0.0035160778, + 0.030999359, + 0.08360334, + -0.028444909, + 0.008868503, + 
-0.027930394, + 0.04986546, + 0.011590262, + -1.5343216e-08, + 0.054317594, + 0.045336407, + -0.07639679, + 0.052074224, + -0.012374757, + 0.060316578, + -0.0041594645, + -0.017367603, + -0.014107863, + -0.017071113, + 0.075814135, + 0.0079101855, + -0.0653045, + -0.047504168, + 0.038116574, + -0.050272573, + 0.021948416, + 0.0685364, + -0.037221905, + -0.04937101, + 0.057309754, + 0.008049557, + -0.042899966, + 0.09778022, + 0.058175605, + 0.05289681, + 0.024736015, + 0.032797, + -0.0062358975, + 0.08241506, + 0.03714261, + 0.10870123, + -0.05776473, + 0.036651433, + -0.018998465, + -0.08551218, + 0.05913097, + -0.04569603, + 0.025227055, + 0.022481369, + -0.007972968, + 0.0031193425, + -0.047840066, + -0.01866631, + 0.048634782, + -0.032800686, + 0.05455027, + -0.03739758, + -0.07470992, + -0.019272048, + 0.0060886056, + 0.042403262, + 0.067405015, + 0.044566732, + 0.033157814, + 0.033654317, + 0.0012653307, + 0.0331767, + -0.04841697, + -0.005587956, + -0.008498534, + -0.016844513, + -0.075615294, + 0.003522267 ], "index": 1, "object": "embedding" }, { "embedding": [ - 0.033612337, - 0.010374505, - -0.01756061, - 0.029361853, - -0.009454598, - -0.037026335, - -0.02555746, - 0.0086515825, - 0.019154208, - 0.03955405, - -0.02469497, - -0.0126976445, - -0.0065836124, - 0.043807767, - -0.036032367, - -0.056751598, - 0.005685301, - -0.048611272, - -0.01940104, - 0.051023778, - 0.06368657, - 0.04569995, - -0.025642192, - 0.02090835, - 0.023841413, - -0.011006624, - -0.06968253, - 0.008696027, - -0.0100323185, - -0.004299733, - -0.013709692, - 0.060795236, - 0.054181676, - 0.030621745, - 0.032446172, - 0.023919526, - 0.09566865, - 0.041953687, - 0.00087092275, - 0.04335, - 0.03367777, - -0.09001533, - 0.021590438, - 0.04053571, - -0.002674088, - 0.031825043, - -0.045521177, - 0.047551177, - -0.07043583, - -0.013617987, - -0.0102603305, - -0.016518736, - -0.07214938, - -0.055422474, - 0.03316378, - -0.0076137385, - 0.050792947, - -0.04655027, - 0.064705744, - 
0.08078938, - -0.053805117, - -0.013050277, - -0.023942292, - 0.0726168, - 0.07433478, - 0.050372824, - -0.03490959, - -0.101285346, - -0.016964512, - -0.054189693, - 0.005499785, - 0.006458164, - 0.055815514, - 0.048383262, - 0.040276967, - 0.0056121964, - -0.024112493, - -0.10037388, - 0.07864023, - 0.04749725, - -0.083059065, - -0.05695486, - -0.007121432, - 0.03499301, - 0.0130494, - 0.047826655, - 0.07769031, - -0.0050768964, - -0.088448934, - 0.0034568575, - -0.023282519, - 0.045576394, - -0.042316645, - -0.024240615, - 0.017663328, - -0.024584634, - -0.032086663, - -0.009175009, - -0.060619276, - 0.0788936, - -0.007151155, - -0.0018835695, - -0.024150992, - 0.035605535, - -0.097886965, - -0.07463594, - 0.036441684, - -0.061645452, - 0.06754617, - 0.0037501638, - -0.050999243, - -0.023512185, - 0.04400348, - 0.042692684, - 0.020495275, - -0.0098657925, - -0.10782902, - 0.041300014, - 0.029186765, - 0.045622177, - 0.0951987, - -0.020906197, - 0.00027652894, - -0.05796104, - 0.022876726, - -0.043638688, - 0.021679614, - -8.721427e-33, - -0.0012232207, - -0.038046468, - 0.04248091, - 0.08773161, - -0.0042147394, - 0.00010909877, - -0.06459573, - 0.061631102, - -0.0035571777, - -0.0057670954, - -0.010751822, - -0.06539647, - 0.0026381642, - 0.006108226, - 0.07177802, - 0.099656485, - -0.028420987, - 0.0886893, - -0.06579721, - 0.0577445, - -0.057205524, - 0.036075067, - -0.02090538, - -0.09164578, - -0.07255028, - -0.075212136, - -0.006453883, - 0.010381722, - -0.0037261078, - 0.020341685, - -0.039610952, - 0.048633367, - -0.057997692, - 0.04580804, - -0.002834594, - -0.026399026, - 0.011338722, - -0.008768234, - -0.012484398, - 0.0030163776, - -0.050530374, - -0.043636482, - -0.024315875, - 0.065459326, - 0.050444957, - -0.031544425, - -0.00075475493, - -0.04531901, - 0.058805995, - 0.0012770096, - -0.019136755, - 0.012550491, - 0.040011447, - -0.022380024, - -0.030805111, - 0.04761777, - 0.036087062, - -0.00771528, - -0.042050246, - 0.09727571, - 0.011417657, - 
0.027789006, - -0.08352716, - 0.019375375, - -0.05415718, - 0.014092975, - -0.04270275, - -0.007896535, - 0.029720219, - 0.07610263, - 0.031358883, - -0.04178186, - 0.0016060148, - 0.03870257, - -0.059810083, - -0.07050183, - -0.051603932, - 0.06843783, - -0.0037906233, - -0.012867741, - 0.035064667, - -0.112596914, - 0.053979058, - -0.11403874, - -0.033291597, - -0.011375664, - -0.022975085, - -0.0874419, - 0.0009676586, - -0.07040301, - -0.034353334, - 0.028341567, - -0.003938582, - -0.065418504, - 0.05670526, - 4.4032913e-33, - -0.06758047, - 0.07452212, - -0.04625966, - 0.110544346, - 0.08249691, - -0.035985246, - 0.112199076, - -0.010368401, - -0.09361668, - 0.15915231, - 0.005810317, - 0.041577023, - 0.041846495, - -0.0221648, - 0.0180787, - 0.01732049, - 0.031424496, - -0.07654498, - 0.011575445, - -0.04279533, - -0.077900656, - 0.12441581, - 0.036161043, - 0.09728094, - -0.06544197, - 0.051177975, - 0.030517569, - -0.06477891, - 0.0033884735, - -0.0065040532, - 0.002094866, - 0.0057612373, - -0.07176532, - 0.01457261, - 0.0111329, - -0.012400559, - 0.09850194, - -0.05333344, - -0.059571583, - 0.027873877, - 0.013967755, - 0.0973726, - 0.14173166, - 0.09823832, - -0.00076127227, - 0.036324706, - 0.013391566, - -0.11345763, - 0.015459011, - 0.04547403, - -0.05844395, - -0.011545099, - 0.026310358, - 0.055226807, - -0.05014672, - 0.014071454, - -0.04505251, - 0.0055593317, - 0.017989416, - 0.01946363, - -0.08633586, - 0.08156571, - -0.012573777, - 0.03409684, - -0.017857939, - -0.031390663, - -0.08447243, - 0.07359053, - 0.03050787, - 0.014397102, - 0.085515074, - -0.0014615763, - -0.117197014, - -0.071065396, - 0.08322675, - -0.077766545, - -0.04483503, - -0.009105399, - 0.031649765, - -0.03719005, - -0.05655446, - -0.07973028, - 0.0033281972, - 0.039855074, - -0.05885036, - 0.09728466, - -0.016143035, - 0.02778064, - -0.06544481, - 0.040895227, - 0.009707747, - -0.012031996, - -0.0087121, - -0.050623253, - -0.024199592, - -1.8976149e-08, - -0.024199035, - 
-0.05503201, - -0.014488159, - 0.017767312, - -0.014441727, - 0.06777053, - 0.032016836, - -0.04272461, - -0.056400675, - 0.00891021, - 0.09656018, - 0.06953362, - -0.09056004, - 0.018509604, - 0.0636711, - -0.07154264, - -0.004792113, - -0.008434159, - -0.016066523, - 0.08377477, - -0.08183436, - 0.050272364, - 0.020495478, - 0.027959472, - -0.023466159, - 0.074599385, - 0.03680873, - 0.08727076, - 0.0132746175, - 0.027399603, - 0.06736775, - 0.039569516, - -0.044155512, - -0.051341295, - -0.013279262, - 0.06611269, - 0.0431739, - -0.036882088, - 0.02478827, - 0.0406888, - -0.1132855, - 0.027976915, - 0.0070727277, - 0.039784174, - -0.027419532, - -0.05590226, - -0.08574367, - -0.02544574, - -0.021121135, - -0.05820989, - -0.025676778, - 0.017944483, - 0.04889649, - -0.036834445, - 0.012973257, - -0.06298454, - -0.03954017, - -0.0035980341, - -0.06945554, - 0.042370543, - 0.1125106, - -0.0015144089, - 0.08769291, - -0.041732 + 0.033608936, + 0.010398442, + -0.017553993, + 0.029364064, + -0.009464617, + -0.037002508, + -0.025546908, + 0.008652466, + 0.019171866, + 0.03954904, + -0.024698786, + -0.012698567, + -0.006575828, + 0.043791965, + -0.035994604, + -0.05671484, + 0.0056701135, + -0.048562843, + -0.019397723, + 0.05104105, + 0.063669115, + 0.045695283, + -0.025647452, + 0.020920323, + 0.023776716, + -0.011002659, + -0.06972687, + 0.008664046, + -0.010030623, + -0.004339591, + -0.013750908, + 0.060781404, + 0.054188438, + 0.030624274, + 0.032462284, + 0.023917627, + 0.09566426, + 0.041960694, + 0.00087254023, + 0.04337981, + 0.033683162, + -0.08997299, + 0.021594081, + 0.040572572, + -0.002699973, + 0.03181515, + -0.04552366, + 0.047550924, + -0.07038101, + -0.013632569, + -0.010259558, + -0.016508883, + -0.07213799, + -0.055489477, + 0.03312745, + -0.0075917933, + 0.050809033, + -0.04651997, + 0.064730175, + 0.080775, + -0.053802576, + -0.01303103, + -0.023942273, + 0.07259772, + 0.07427843, + 0.050371367, + -0.034895457, + -0.10131592, + -0.01694396, + 
-0.054186717, + 0.0054757623, + 0.0064777075, + 0.055816714, + 0.04833513, + 0.040297274, + 0.005629578, + -0.024119677, + -0.10035926, + 0.07866524, + 0.047488276, + -0.08309364, + -0.056954693, + -0.007104401, + 0.03495975, + 0.013019207, + 0.047803633, + 0.0777118, + -0.00509941, + -0.08840243, + 0.0034689775, + -0.023245867, + 0.04557207, + -0.04230277, + -0.024225675, + 0.017693503, + -0.024583058, + -0.032045294, + -0.009174721, + -0.06059988, + 0.07893847, + -0.00714072, + -0.0018742199, + -0.024142431, + 0.03558561, + -0.097880565, + -0.07468488, + 0.036415916, + -0.06168905, + 0.06755602, + 0.0037724776, + -0.05098253, + -0.023584208, + 0.043991886, + 0.042738363, + 0.020495268, + -0.0098619405, + -0.107808046, + 0.041273866, + 0.02920404, + 0.04561137, + 0.095207445, + -0.020896124, + 0.00023096669, + -0.057968765, + 0.022850417, + -0.043668177, + 0.021688405, + -8.720441e-33, + -0.0012058292, + -0.03802704, + 0.042444937, + 0.08773871, + -0.004220456, + 0.00012147395, + -0.06457608, + 0.061607473, + -0.0035593824, + -0.0057741986, + -0.010743548, + -0.065433994, + 0.002658555, + 0.006107435, + 0.07180735, + 0.099667646, + -0.028398223, + 0.08866949, + -0.06581663, + 0.057735924, + -0.057161212, + 0.036086526, + -0.02094693, + -0.091624826, + -0.07255717, + -0.07521124, + -0.0064620934, + 0.010381977, + -0.0037112501, + 0.020337056, + -0.0396202, + 0.04863623, + -0.057977367, + 0.045799762, + -0.0028102288, + -0.026413642, + 0.011332779, + -0.008787543, + -0.01246847, + 0.003016415, + -0.050528, + -0.043582138, + -0.024329135, + 0.06542502, + 0.050448198, + -0.031531323, + -0.0007779434, + -0.04532696, + 0.058871463, + 0.0012682271, + -0.019152224, + 0.01258753, + 0.03999562, + -0.022376174, + -0.030803563, + 0.04760751, + 0.036079545, + -0.0076535675, + -0.04203372, + 0.097275354, + 0.011409953, + 0.027754916, + -0.0835048, + 0.019380422, + -0.05416042, + 0.014054438, + -0.04266347, + -0.007908375, + 0.029723784, + 0.0761083, + 0.03139675, + 
-0.041797075, + 0.0016033188, + 0.038726415, + -0.059795942, + -0.07054141, + -0.05157118, + 0.0684149, + -0.003766908, + -0.012878277, + 0.035064787, + -0.11262972, + 0.053968824, + -0.1140537, + -0.033282436, + -0.011386638, + -0.022939742, + -0.08745513, + 0.0009942602, + -0.07038481, + -0.034342457, + 0.028354177, + -0.003912724, + -0.0654399, + 0.056719452, + 4.401956e-33, + -0.06759265, + 0.07454906, + -0.046297893, + 0.11055107, + 0.08249596, + -0.035986293, + 0.11225011, + -0.010407374, + -0.09363792, + 0.15916187, + 0.0057810647, + 0.041591797, + 0.041856647, + -0.022185486, + 0.018102126, + 0.017321726, + 0.031456053, + -0.076545484, + 0.011582533, + -0.04284016, + -0.07789234, + 0.12440625, + 0.03617526, + 0.09730373, + -0.06544067, + 0.051156454, + 0.030499168, + -0.06475215, + 0.003401952, + -0.006514968, + 0.002070544, + 0.005759038, + -0.07172358, + 0.0145481, + 0.011155189, + -0.012380945, + 0.098492086, + -0.053324275, + -0.05958665, + 0.027893873, + 0.01397341, + 0.09733979, + 0.14172351, + 0.09822425, + -0.000753543, + 0.036323734, + 0.013357258, + -0.11347022, + 0.01546052, + 0.045483384, + -0.05844928, + -0.011548025, + 0.026313214, + 0.055244267, + -0.050127964, + 0.014079803, + -0.04502139, + 0.005556844, + 0.017963082, + 0.01945956, + -0.08633155, + 0.08159404, + -0.012574804, + 0.034080163, + -0.017839924, + -0.031354588, + -0.084478684, + 0.073620565, + 0.030523231, + 0.014402138, + 0.08548794, + -0.0014136349, + -0.117235936, + -0.071074195, + 0.083228014, + -0.07779257, + -0.044802953, + -0.009106513, + 0.0316612, + -0.03717584, + -0.05652208, + -0.07973565, + 0.003353578, + 0.03982252, + -0.05883056, + 0.097288825, + -0.01612578, + 0.0277682, + -0.06547234, + 0.040883925, + 0.009703006, + -0.012041616, + -0.008719466, + -0.05062296, + -0.024210127, + -1.8977037e-08, + -0.024204005, + -0.055027, + -0.014531686, + 0.017793229, + -0.014444479, + 0.06776621, + 0.032021433, + -0.04271159, + -0.056421917, + 0.008902811, + 0.0965939, + 
0.069501095, + -0.09060633, + 0.018546907, + 0.06365827, + -0.0715206, + -0.0047898116, + -0.008457558, + -0.01603862, + 0.083756834, + -0.081861764, + 0.050247736, + 0.020439949, + 0.027903674, + -0.02344807, + 0.074611686, + 0.036804173, + 0.08724397, + 0.013292644, + 0.02741063, + 0.0673842, + 0.039584856, + -0.044136506, + -0.051336076, + -0.013291427, + 0.06607191, + 0.043135997, + -0.036887288, + 0.024783924, + 0.040656343, + -0.11329909, + 0.027977955, + 0.0070782495, + 0.039789386, + -0.027414937, + -0.055913515, + -0.085740864, + -0.025473714, + -0.021161858, + -0.05823863, + -0.025728453, + 0.017994676, + 0.04891479, + -0.03684745, + 0.012969448, + -0.063004315, + -0.039539963, + -0.0036127788, + -0.069469534, + 0.042392787, + 0.11249585, + -0.0015041318, + 0.087654695, + -0.041728426 ], "index": 2, "object": "embedding" diff --git a/tests/integration/recordings/responses/80e4404d8987.json b/tests/integration/recordings/responses/80e4404d8987.json index 09d510916..226b6648d 100644 --- a/tests/integration/recordings/responses/80e4404d8987.json +++ b/tests/integration/recordings/responses/80e4404d8987.json @@ -22,7 +22,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:10.76700718Z", + "created_at": "2025-10-02T02:54:51.50254Z", "done": false, "done_reason": null, "total_duration": null, @@ -40,7 +40,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:10.956949035Z", + "created_at": "2025-10-02T02:54:51.549521Z", "done": false, "done_reason": null, "total_duration": null, @@ -58,7 +58,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:11.147886127Z", + "created_at": "2025-10-02T02:54:51.594384Z", "done": false, "done_reason": null, "total_duration": null, @@ -76,7 +76,7 @@ "__type__": 
"ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:11.337832912Z", + "created_at": "2025-10-02T02:54:51.637769Z", "done": false, "done_reason": null, "total_duration": null, @@ -94,7 +94,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:11.524017554Z", + "created_at": "2025-10-02T02:54:51.684099Z", "done": false, "done_reason": null, "total_duration": null, @@ -112,7 +112,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:11.712703934Z", + "created_at": "2025-10-02T02:54:51.730912Z", "done": false, "done_reason": null, "total_duration": null, @@ -130,7 +130,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:11.903877596Z", + "created_at": "2025-10-02T02:54:51.777299Z", "done": false, "done_reason": null, "total_duration": null, @@ -148,7 +148,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:12.095535165Z", + "created_at": "2025-10-02T02:54:51.823309Z", "done": false, "done_reason": null, "total_duration": null, @@ -166,7 +166,7 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:12.291614477Z", + "created_at": "2025-10-02T02:54:51.868924Z", "done": false, "done_reason": null, "total_duration": null, @@ -184,15 +184,15 @@ "__type__": "ollama._types.GenerateResponse", "__data__": { "model": "llama3.2:3b-instruct-fp16", - "created_at": "2025-10-01T01:33:12.483844314Z", + "created_at": "2025-10-02T02:54:51.915105Z", "done": true, "done_reason": "stop", - "total_duration": 4303509972, - "load_duration": 44748689, + "total_duration": 5098012833, + "load_duration": 4289621791, 
"prompt_eval_count": 31, - "prompt_eval_duration": 2539513749, + "prompt_eval_duration": 393000541, "eval_count": 10, - "eval_duration": 1718623697, + "eval_duration": 414080875, "response": "", "thinking": null, "context": null diff --git a/tests/integration/recordings/responses/84432044194a.json b/tests/integration/recordings/responses/84432044194a.json new file mode 100644 index 000000000..373652c28 --- /dev/null +++ b/tests/integration/recordings/responses/84432044194a.json @@ -0,0 +1,414 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_s1g1se8b", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_s1g1se8b", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": 
"chatcmpl-157", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441156, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441157, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441157, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441157, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759441157, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-157", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759441157, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/8486e5b1c6db.json b/tests/integration/recordings/responses/8486e5b1c6db.json new file mode 100644 index 000000000..6eae12ff0 --- /dev/null +++ b/tests/integration/recordings/responses/8486e5b1c6db.json @@ -0,0 +1,276 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point_with_metadata\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point_with_metadata(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.185623Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-02T02:55:15.227358Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.268854Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.311161Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.353205Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.394667Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { 
+ "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.43604Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.477482Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " in", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.519193Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Celsius", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.561068Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.602574Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + 
"eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.644332Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.686134Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:15.727722Z", + "done": true, + "done_reason": "stop", + "total_duration": 730418375, + "load_duration": 118920875, + "prompt_eval_count": 401, + "prompt_eval_duration": 67995917, + "eval_count": 14, + "eval_duration": 542856417, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/84fc473e7b29.json b/tests/integration/recordings/responses/84fc473e7b29.json index f01f11759..867f6208a 100644 --- a/tests/integration/recordings/responses/84fc473e7b29.json +++ b/tests/integration/recordings/responses/84fc473e7b29.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-165", + "id": "chatcmpl-400", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282579, + "created": 1759441673, "model": 
"llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/87577729d812.json b/tests/integration/recordings/responses/87577729d812.json index 9b8699084..372b41369 100644 --- a/tests/integration/recordings/responses/87577729d812.json +++ b/tests/integration/recordings/responses/87577729d812.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-609", + "id": "chatcmpl-192", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282388, + "created": 1759437810, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/8965c0df9071.json b/tests/integration/recordings/responses/8965c0df9071.json new file mode 100644 index 000000000..66926eb11 --- /dev/null +++ b/tests/integration/recordings/responses/8965c0df9071.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant Always respond with tool calls no matter what. " + }, + { + "role": "user", + "content": "Get the boiling point of polyjuice with a tool call." 
+ } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-964", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_v7gdtg8p", + "function": { + "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441159, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-964", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759441159, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/8baad1435f9c.json b/tests/integration/recordings/responses/8baad1435f9c.json index 2a8338816..ccc118a38 100644 --- a/tests/integration/recordings/responses/8baad1435f9c.json +++ b/tests/integration/recordings/responses/8baad1435f9c.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-469", + "id": "chatcmpl-222", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245125, + "created": 1759437799, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/8ce928ad0b85.json b/tests/integration/recordings/responses/8ce928ad0b85.json index e15dad63e..4fac48e7c 100644 --- a/tests/integration/recordings/responses/8ce928ad0b85.json +++ b/tests/integration/recordings/responses/8ce928ad0b85.json @@ -19,390 +19,390 @@ "data": [ { "embedding": [ - 0.043770123, - 0.021501394, - -0.081300564, - 0.010615138, - -0.07908651, - -0.03219175, - 0.13090447, - 0.042329222, - -0.11600146, - -0.07588096, - 0.041826088, - -0.080617175, - 0.038125783, - -0.01069657, - 0.01577377, - -0.04196888, - 0.043099895, - -0.033355612, - 0.013571747, - -0.0103924, - 0.015561896, - -0.03786113, - -0.050319925, - -0.02566629, - -0.047868017, - -0.08717805, - 0.01685358, - -0.03676223, - 0.0063788705, - 0.020863743, - 0.11264443, - -0.0021451844, - -0.07911777, - 0.038758967, - 0.115321144, - -0.019753717, - 0.0067159277, - -0.02115779, - -0.0144774495, - -0.0027154125, - -0.034384295, - -0.052576542, - -0.030578543, - 0.04745372, - -0.024294367, - 0.01091144, - -0.03947583, - 0.07183755, - -0.020715859, - 0.018965777, - 0.04292474, - -0.007755194, - 0.0025708016, - -0.058263537, - 0.0117485095, - -0.022703577, - 0.001755438, - -0.012628832, - 0.030728007, - 0.017719304, - -0.061525322, - -0.036568273, - 0.025831668, - 0.025376469, - 0.012137967, - 0.009102949, - -0.027313529, - 
-0.093379095, - 0.0052120173, - 0.0074658697, - -0.07538, - 0.010161349, - -0.028439516, - 0.03026334, - 0.0036700817, - -0.022599109, - -0.037862476, - -0.08384314, - -0.0124443015, - -0.048889726, - 0.029131662, - -0.044443335, - -0.07518736, - -0.020938978, - 0.063386515, - 0.16294138, - 0.060580015, - -0.01281573, - -0.031040885, - 0.018372353, - 0.11225789, - 0.072922915, - -0.06272038, - -0.031792488, - -0.017476005, - 0.04846264, - -0.04116229, - -0.041834168, - -0.059919056, - 0.15907861, - -0.027786179, - -0.012492541, - 0.05599519, - -0.019895995, - 0.022076221, - 0.006363836, - 0.046413723, - -0.0731325, - 0.03326452, - 0.059475966, - -0.033314705, - 0.030761855, - 0.00819013, - -0.020254606, - 0.05658313, - -0.08153619, - 0.023402533, - 0.0060753864, - -0.07993489, - 0.013990512, - 0.052254565, - 0.027170746, - -0.049271967, - 0.02814688, - 0.019500777, - 0.054206643, - 0.082691684, - -1.8817448e-33, - 0.013630832, - -0.010863344, - 0.015899567, - 0.06938339, - -0.05113185, - 0.08995833, - 0.04450505, - 0.08101549, - 0.018903807, - -0.020960161, - -0.017933648, - -0.02174221, - 0.010988686, - 0.015100026, - 0.017031211, - 0.09433042, - 0.003454907, - 0.010199729, - -0.0446973, - 0.0018167854, - 0.015817188, - -0.06576281, - -0.004943305, - 0.004393494, - -0.019598262, - -0.092797264, - -0.025917865, - 0.04409669, - 0.054165967, - -0.007365383, - -0.021470547, - -0.03683317, - -0.091507494, - 0.08402351, - -0.01809901, - 0.0038072586, - 0.020236026, - 0.0439697, - -0.077322714, - 0.0057473024, - -0.054513566, - -0.024854423, - 0.075270385, - 0.034554463, - -0.08118007, - -0.12208905, - -0.0052893, - 0.0078005046, - 0.05028763, - 0.015558154, - -0.056349996, - 0.0398076, - 0.012997719, - -0.040145177, - 0.014409028, - -0.033200737, - -0.008437484, - -0.037582297, - -0.019651853, - 0.017285295, - -0.008976723, - -0.0018494898, - -0.0030671947, - 0.03046138, - -0.051143825, - -0.08688155, - -0.018344227, - -0.113307714, - 0.073259674, - 0.04602224, - 
0.012651309, - -0.063435435, - -0.028471926, - 0.020155901, - -0.078830436, - -0.00069818215, - -0.03156303, - 0.123062745, - 0.0042949035, - -0.026413191, - 0.07838535, - -0.07747411, - -0.02126005, - 0.048919026, - 0.02919413, - -0.009296978, - -0.030687347, - -0.041037664, - -0.038565576, - -0.08043238, - 0.023225678, - 0.041928973, - -0.05812511, - 0.058555346, - 0.07633673, - 4.4510456e-34, - -0.019582625, - 0.040237214, - 0.01455587, - 0.034353998, - 0.043911777, - -0.023234777, - 0.0677493, - -0.030089214, - -0.09076478, - -0.019257858, - -0.02767876, - -0.00065146026, - 0.0043030144, - 0.05363546, - 0.04073387, - 0.03255476, - -0.10712685, - -0.050083157, - -0.016644027, - -0.0077649173, - -0.11153465, - 0.07478277, - -0.015999233, - -0.050547555, - -0.113217294, - -0.006174145, - 0.050873067, - -0.030284155, - 0.04314861, - 0.033020362, - 0.023671353, - 0.04654029, - -0.03415647, - 0.03614603, - 0.023047049, - -0.02677317, - 0.063607745, - 0.09978129, - 0.03527302, - 0.15538219, - 0.08349002, - 0.10931568, - 0.04684532, - -0.010147538, - -0.03256112, - 0.12924333, - 0.031221064, - -0.099673584, - 0.010860566, - 0.02326085, - -0.011916549, - 0.010135849, - 0.06884636, - 0.009350001, - -0.0226591, - -0.04280281, - -0.04821317, - -0.08508304, - 0.051028382, - 0.045148462, - -0.03566162, - 0.06547104, - 0.048883036, - 0.03793435, - -0.1407055, - -0.06711337, - 0.009881868, - -0.0049659596, - -0.044289522, - 0.0039236215, - -0.02692826, - -0.066134326, - 0.04076233, - -0.05222117, - 0.060488354, - -0.04113724, - -0.04314174, - -0.025147837, - 0.085597694, - -0.044939328, - 0.06395307, - -0.024218159, - -0.050523587, - -0.0020718095, - -0.07894165, - 0.0026805927, - 0.020709056, - 0.1026727, - -0.012374822, - 0.056179732, - 0.06552235, - 0.030915475, - -0.077197015, - -0.061245024, - -0.016111895, - -1.3512232e-08, - -0.05040501, - -0.033646606, - 0.04670903, - 0.047397695, - -0.044165645, - 0.046301767, - -0.006073457, - -0.053902794, - 0.013089125, - 
0.050438043, - -0.009894958, - -0.0041677835, - 0.0723306, - 0.021069802, - 0.02670403, - -0.074845195, - -0.026750853, - 0.052738186, - -0.03469103, - 0.039813705, - -0.01640883, - 0.045899663, - -0.0224731, - 0.02387658, - 0.049145795, - 0.09110705, - -0.0025007618, - 0.04937552, - -0.03864697, - 0.020868128, - 0.07605537, - 0.08488945, - -0.05197299, - -0.06879239, - -0.06136516, - 0.077237174, - -0.06451729, - 0.04453416, - 0.008209786, - 0.015886698, - -0.04280691, - 0.005315579, - 0.0034463098, - 0.0031776188, - -0.013040836, - -0.091359615, - 0.0642767, - -0.054965723, - 0.0007161393, - -0.06260912, - -0.03496602, - -0.029944083, - 0.04422821, - 0.017855663, - -0.027972128, - -0.03656317, - 0.02111413, - 0.060607255, - -0.031320468, - -0.014338154, - 0.034649797, - 0.052279983, - -0.036579564, - 0.028179456 + 0.043779343, + 0.021533398, + -0.081306435, + 0.010584965, + -0.079082854, + -0.03219143, + 0.13092613, + 0.04234389, + -0.11600539, + -0.07588513, + 0.04182356, + -0.08061255, + 0.038127176, + -0.010701234, + 0.015768763, + -0.04193689, + 0.04310592, + -0.033361685, + 0.013566423, + -0.010392366, + 0.015551022, + -0.037858423, + -0.050305344, + -0.025666261, + -0.047879875, + -0.087179765, + 0.016856788, + -0.036765736, + 0.006393739, + 0.020844297, + 0.11262393, + -0.002143682, + -0.07910913, + 0.038748607, + 0.11532516, + -0.019759571, + 0.0066967797, + -0.021164352, + -0.014471563, + -0.0027048697, + -0.034388524, + -0.052571636, + -0.030607725, + 0.04747725, + -0.02431059, + 0.0109337615, + -0.03946421, + 0.071846664, + -0.020690937, + 0.01898796, + 0.042931512, + -0.0077551426, + 0.0025911122, + -0.058268107, + 0.0117475465, + -0.022701943, + 0.0017815019, + -0.012612941, + 0.030724185, + 0.017728312, + -0.06155491, + -0.03656162, + 0.02583153, + 0.02537894, + 0.012139213, + 0.009105951, + -0.027318193, + -0.093389414, + 0.005184693, + 0.007488449, + -0.07540277, + 0.010159999, + -0.028444426, + 0.030260745, + 0.0036438918, + -0.022627153, + 
-0.037846327, + -0.08381657, + -0.012445195, + -0.048908208, + 0.029149827, + -0.044437535, + -0.07520237, + -0.020924438, + 0.06342514, + 0.1629199, + 0.060563333, + -0.012817673, + -0.031030292, + 0.018368995, + 0.11223112, + 0.07292473, + -0.062686674, + -0.031803295, + -0.017489262, + 0.048433464, + -0.041148387, + -0.04183779, + -0.05994369, + 0.15909556, + -0.027785666, + -0.012455991, + 0.056005318, + -0.019891974, + 0.022063067, + 0.006342065, + 0.0464118, + -0.07311654, + 0.033282198, + 0.05949105, + -0.033307947, + 0.030738499, + 0.008186239, + -0.020268966, + 0.056593496, + -0.081526734, + 0.023390312, + 0.0060836566, + -0.07992586, + 0.013986445, + 0.052250065, + 0.027186505, + -0.049284942, + 0.028148174, + 0.019493744, + 0.05418436, + 0.0827222, + -1.8825437e-33, + 0.01360945, + -0.010870715, + 0.015887791, + 0.069373555, + -0.051129147, + 0.08999179, + 0.044494778, + 0.08100757, + 0.018944906, + -0.020974122, + -0.017938385, + -0.021756735, + 0.010972489, + 0.015099965, + 0.017018452, + 0.094338946, + 0.0034407445, + 0.010244923, + -0.044709302, + 0.0018059182, + 0.015817573, + -0.065777056, + -0.004948138, + 0.0044092103, + -0.019589791, + -0.092789896, + -0.025898295, + 0.044104066, + 0.0541385, + -0.007362511, + -0.021487307, + -0.036836285, + -0.09148704, + 0.084001675, + -0.018094191, + 0.003797567, + 0.020257449, + 0.04394643, + -0.0772898, + 0.0057312953, + -0.054519102, + -0.024835315, + 0.0753162, + 0.034552757, + -0.081203006, + -0.12210961, + -0.0053012627, + 0.00780717, + 0.050265096, + 0.015569535, + -0.056362487, + 0.039800324, + 0.013022089, + -0.04015537, + 0.014401654, + -0.033209093, + -0.008451782, + -0.037590392, + -0.01965779, + 0.01730637, + -0.00896531, + -0.0018413392, + -0.0030382746, + 0.030460354, + -0.05112036, + -0.086875, + -0.018338922, + -0.11328767, + 0.07325826, + 0.046035297, + 0.012633494, + -0.06343216, + -0.028439038, + 0.020128354, + -0.07883383, + -0.00069870794, + -0.03155447, + 0.12306934, + 0.004300722, + 
-0.026421167, + 0.078361824, + -0.077461444, + -0.021267027, + 0.048929654, + 0.02919381, + -0.0092880055, + -0.030666346, + -0.04102384, + -0.03860138, + -0.08042292, + 0.023227168, + 0.04191858, + -0.058156747, + 0.0585743, + 0.076342255, + 4.465569e-34, + -0.019599343, + 0.040230304, + 0.01455632, + 0.034345042, + 0.04392999, + -0.023241352, + 0.067749046, + -0.03010354, + -0.09075954, + -0.019227842, + -0.027724287, + -0.00062344945, + 0.0042892746, + 0.053643614, + 0.04075099, + 0.032581333, + -0.107116826, + -0.0500636, + -0.016655827, + -0.007782394, + -0.111523, + 0.07476429, + -0.016019335, + -0.050536986, + -0.11320647, + -0.0061384854, + 0.050886273, + -0.030283457, + 0.04318923, + 0.03301474, + 0.02362771, + 0.046507858, + -0.03416386, + 0.036145207, + 0.023037339, + -0.026803765, + 0.06361122, + 0.09975251, + 0.035269737, + 0.1554014, + 0.083479255, + 0.10931981, + 0.046847064, + -0.010136355, + -0.032541983, + 0.12926093, + 0.031193413, + -0.09971323, + 0.010830718, + 0.02325219, + -0.011917061, + 0.010155018, + 0.06883269, + 0.009340846, + -0.022698723, + -0.042815465, + -0.048211087, + -0.085067384, + 0.05105234, + 0.045155898, + -0.03564869, + 0.06549556, + 0.048875004, + 0.037915554, + -0.14071068, + -0.067095764, + 0.009898252, + -0.0049653547, + -0.044304688, + 0.0039006064, + -0.026903173, + -0.066124685, + 0.040738244, + -0.052228633, + 0.060485654, + -0.041119356, + -0.04312945, + -0.025152665, + 0.08556276, + -0.044942576, + 0.06393979, + -0.024227533, + -0.05052092, + -0.0020624825, + -0.078943975, + 0.0026753, + 0.02068896, + 0.102683865, + -0.01237572, + 0.056172684, + 0.06552171, + 0.030940128, + -0.07721113, + -0.061241012, + -0.016143149, + -1.3511957e-08, + -0.050416306, + -0.033628013, + 0.046722032, + 0.04744138, + -0.04411888, + 0.04631675, + -0.0060847937, + -0.053873356, + 0.013075445, + 0.050437532, + -0.009895477, + -0.0041795173, + 0.07229928, + 0.021081135, + 0.02672776, + -0.07482113, + -0.026757998, + 0.052755926, + 
-0.034690056, + 0.039811596, + -0.016370349, + 0.045900222, + -0.02250936, + 0.023861, + 0.04912799, + 0.09111738, + -0.0024878879, + 0.049395334, + -0.03861115, + 0.020867983, + 0.076049894, + 0.084881924, + -0.051956687, + -0.06878504, + -0.061384037, + 0.077220954, + -0.06454818, + 0.044513144, + 0.008181126, + 0.015890416, + -0.04280811, + 0.005317184, + 0.0034429359, + 0.0031937633, + -0.013058055, + -0.09134677, + 0.06425565, + -0.054977305, + 0.0007087448, + -0.06258866, + -0.034974415, + -0.029966963, + 0.044276785, + 0.017868131, + -0.027976807, + -0.036579583, + 0.021142753, + 0.06057356, + -0.03133335, + -0.014331035, + 0.034653842, + 0.052315667, + -0.036585484, + 0.028209662 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/8d035e153b6f.json b/tests/integration/recordings/responses/8d035e153b6f.json index 18f3ee3cd..6c08b1c56 100644 --- a/tests/integration/recordings/responses/8d035e153b6f.json +++ b/tests/integration/recordings/responses/8d035e153b6f.json @@ -20,7 +20,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-708", + "id": "chatcmpl-155", "choices": [ { "finish_reason": "stop", @@ -37,7 +37,7 @@ } } ], - "created": 1759012142, + "created": 1759437855, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/8deded211f21.json b/tests/integration/recordings/responses/8deded211f21.json new file mode 100644 index 000000000..8cb3e75af --- /dev/null +++ b/tests/integration/recordings/responses/8deded211f21.json @@ -0,0 +1,743 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. 
You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"book_flight\",\n \"description\": \"\n Book a flight with passenger and payment information.\n\n This tool uses JSON Schema $ref and $defs for type reuse.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"flight\", \"passengers\", \"payment\"],\n \"properties\": {\n \"flight\": {\n \"type\": \"object\",\n \"description\": \"\"\n },\n \"passengers\": {\n \"type\": \"array\",\n \"description\": \"\"\n },\n \"payment\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"process_order\",\n \"description\": \"\n Process an order with nested address information.\n\n Uses nested objects and $ref.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"order_data\"],\n \"properties\": {\n \"order_data\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"flexible_contact\",\n \"description\": \"\n Accept flexible contact (email or phone).\n\n Uses anyOf schema.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"contact_info\"],\n \"properties\": {\n \"contact_info\": {\n \"type\": \"string\",\n \"description\": \"\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when 
necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant that can process orders and book flights.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nProcess an order with 2 widgets going to 123 Main St, San Francisco<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.457795Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.499711Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "process", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.544576Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.588521Z", + "done": false, + "done_reason": null, + 
"total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.633501Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_data", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.677395Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "={\"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.720407Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "order", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.763935Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_id", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": 
"llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.807169Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.851019Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " ", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.893637Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "1", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.935864Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:19.978334Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + 
"context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.020617Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "customer", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.063212Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.106093Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.149989Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.192674Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + 
"prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "John", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.236337Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Doe", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.278777Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.320886Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.363891Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "address", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.40745Z", + 
"done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.451859Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " {\"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.494751Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "street", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.536928Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.581229Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": 
"ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.623455Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "123", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.665328Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Main", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.707445Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " St", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.749803Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.792527Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": 
null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.835252Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "city", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.878606Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.921646Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:20.963436Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "San", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.012147Z", + "done": false, + "done_reason": null, + 
"total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Francisco", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.063248Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\"}}", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.10591Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.149804Z", + "done": true, + "done_reason": "stop", + "total_duration": 3544551625, + "load_duration": 122599250, + "prompt_eval_count": 556, + "prompt_eval_duration": 1727890958, + "eval_count": 40, + "eval_duration": 1693076542, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/8f000a878ccd.json b/tests/integration/recordings/responses/8f000a878ccd.json index dcca8d1b2..351804652 100644 --- a/tests/integration/recordings/responses/8f000a878ccd.json +++ b/tests/integration/recordings/responses/8f000a878ccd.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-422", + "id": "chatcmpl-988", 
"choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759368373, + "created": 1759437811, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/920c0495cde6.json b/tests/integration/recordings/responses/920c0495cde6.json index 09b967cff..dc433ce46 100644 --- a/tests/integration/recordings/responses/920c0495cde6.json +++ b/tests/integration/recordings/responses/920c0495cde6.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-992", + "id": "chatcmpl-724", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245120, + "created": 1759437797, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/92a9a916ef02.json b/tests/integration/recordings/responses/92a9a916ef02.json index 5fe294826..5f2dfd618 100644 --- a/tests/integration/recordings/responses/92a9a916ef02.json +++ b/tests/integration/recordings/responses/92a9a916ef02.json @@ -20,14 +20,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-343", + "id": "chatcmpl-923", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "The currency of Japan is the Japanese yen (, ry\u014d) and its symbol, \u00a5.", + "content": "The currency of Japan is the Japanese yen (\u00a5). It is represented by the symbol \u00a5. In some contexts, it's also abbreviated as \"JPY\" or written as \"yen\". 
The Bank of Japan is responsible for managing the country's monetary policy and issuing new yen banknotes and coins.", "refusal": null, "role": "assistant", "annotations": null, @@ -37,15 +37,15 @@ } } ], - "created": 1759012146, + "created": 1759437863, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 20, + "completion_tokens": 61, "prompt_tokens": 32, - "total_tokens": 52, + "total_tokens": 93, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/930cf0cec376.json b/tests/integration/recordings/responses/930cf0cec376.json new file mode 100644 index 000000000..53b8d5f71 --- /dev/null +++ b/tests/integration/recordings/responses/930cf0cec376.json @@ -0,0 +1,1584 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_jlswgy4x", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_jlswgy4x", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " was", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": 
"llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " unable", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " find", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + 
"finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " poly", 
+ "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + 
"__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " get", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437841, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "_bo", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "iling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } 
+ }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "_point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " tool", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " does", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " not", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " have", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " information", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " on", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + 
"logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " its", + "function_call": null, + "refusal": null, + 
"role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " database", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " If", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + 
"choices": [ + { + "delta": { + "content": " you", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "'re", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " looking", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " for", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437842, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " a", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " different", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " substance", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": ",", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " please", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " let", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " me", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " know", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " and", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "'ll", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + 
"content": " be", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " happy", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " try", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": " again", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-188", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437843, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/931ac7158789.json b/tests/integration/recordings/responses/931ac7158789.json new file mode 100644 index 000000000..44aa46105 --- /dev/null +++ b/tests/integration/recordings/responses/931ac7158789.json @@ -0,0 +1,86 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": 
"llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "What's the weather in San Francisco?" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather for a location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "City name" + } + }, + "required": [ + "location" + ] + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-505", + "choices": [ + { + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null, + "message": { + "content": "", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": [ + { + "id": "call_t7y6oe6q", + "function": { + "arguments": "{\"location\":\"San Francisco\"}", + "name": "get_weather" + }, + "type": "function", + "index": 0 + } + ] + } + } + ], + "created": 1759437802, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 18, + "prompt_tokens": 161, + "total_tokens": 179, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/9db34836a1a7.json b/tests/integration/recordings/responses/9db34836a1a7.json new file mode 100644 index 000000000..b98ea52df --- /dev/null +++ b/tests/integration/recordings/responses/9db34836a1a7.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What 
is the boiling point of the liquid polyjuice in celsius?" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-624", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_j2jdmkk1", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441665, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-624", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759441665, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + 
], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/9e0b1ac678f6.json b/tests/integration/recordings/responses/9e0b1ac678f6.json index 8aa06d495..02491daed 100644 --- a/tests/integration/recordings/responses/9e0b1ac678f6.json +++ b/tests/integration/recordings/responses/9e0b1ac678f6.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-122", + "id": "chatcmpl-141", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245126, + "created": 1759437800, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/9ffc75524647.json b/tests/integration/recordings/responses/9ffc75524647.json new file mode 100644 index 000000000..8f7e2480b --- /dev/null +++ b/tests/integration/recordings/responses/9ffc75524647.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_ew600lfr", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429347, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-704", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759429347, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/a0c4df33879f.json b/tests/integration/recordings/responses/a0c4df33879f.json index 7898e5b02..e2bc1da33 100644 --- a/tests/integration/recordings/responses/a0c4df33879f.json +++ b/tests/integration/recordings/responses/a0c4df33879f.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1756921356, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - "created": 1756921356, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,11 +73,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { - "content": " name", + "content": " word", "function_call": null, "refusal": null, "role": "assistant", @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1756921356, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -99,7 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -114,7 +114,7 @@ "logprobs": null } ], - "created": 1756921356, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -125,1099 +125,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", - 
"choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " Sun", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " is", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " Sol", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " In", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " ancient", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " Roman", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - 
"service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " mythology", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " Sol", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " was", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 
1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " god", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " equivalent", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": "assistant", - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " Greek", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " god", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { 
- "content": " Hel", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921356, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": "ios", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": ",", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " he", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " was", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " often", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " depicted", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - 
"service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " as", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " a", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " radi", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": "ating", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 
1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " sun", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " with", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " rays", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " eman", - "function_call": null, - "refusal": null, - "role": "assistant", - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": "ating", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " from", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " his", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { 
- "content": " body", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": ".", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " The", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " term", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -1232,7 +140,7 @@ "logprobs": null } ], - "created": 1756921357, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1243,11 +151,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { - "content": "s", + "content": "sun", "function_call": null, "refusal": null, "role": "assistant", @@ -1258,7 +166,7 @@ "logprobs": null } ], - "created": 1756921357, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1269,33 +177,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": "olar", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921357, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -1310,7 +192,7 @@ "logprobs": null } ], - "created": 1756921358, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1321,7 +203,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -1336,7 +218,7 @@ "logprobs": null } ], - "created": 1756921358, + "created": 1759437880, 
"model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1347,11 +229,11 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { - "content": " still", + "content": " Sol", "function_call": null, "refusal": null, "role": "assistant", @@ -1362,7 +244,7 @@ "logprobs": null } ], - "created": 1756921358, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1373,475 +255,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " used", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " in", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " scientific", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 
1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " and", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " astronomical", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " contexts", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": "assistant", - 
"tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " refer", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " phenomena", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - 
"delta": { - "content": " related", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " to", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " Sun", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " or", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " the", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " solar", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - "service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", - "choices": [ - { - "delta": { - "content": " system", - "function_call": null, - "refusal": null, - "role": "assistant", - "tool_calls": null - }, - "finish_reason": null, - "index": 0, - "logprobs": null - } - ], - "created": 1756921358, - "model": "llama3.2:3b-instruct-fp16", - "object": "chat.completion.chunk", - 
"service_tier": null, - "system_fingerprint": "fp_ollama", - "usage": null - } - }, - { - "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", - "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -1856,7 +270,7 @@ "logprobs": null } ], - "created": 1756921358, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -1867,7 +281,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-792", + "id": "chatcmpl-957", "choices": [ { "delta": { @@ -1882,7 +296,7 @@ "logprobs": null } ], - "created": 1756921358, + "created": 1759437880, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/a11b11923cc8.json b/tests/integration/recordings/responses/a11b11923cc8.json new file mode 100644 index 000000000..f3031b8fd --- /dev/null +++ b/tests/integration/recordings/responses/a11b11923cc8.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "str", + "description": "The name of the liquid" + }, + "celcius": { + "type": "bool", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-410", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_4476969q", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759425215, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-410", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759425215, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/a46b77ffd494.json b/tests/integration/recordings/responses/a46b77ffd494.json index dff3d3fd7..469fe098d 100644 --- a/tests/integration/recordings/responses/a46b77ffd494.json +++ b/tests/integration/recordings/responses/a46b77ffd494.json @@ -17,7 +17,7 @@ "body": { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-183", + "id": "cmpl-253", "choices": [ { "finish_reason": "stop", @@ -26,7 +26,7 @@ "text": "Michael Jordan was born in the year of " } ], - "created": 1758978053, + "created": 1759376606, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", diff --git a/tests/integration/recordings/responses/a4c8d19bb1eb.json b/tests/integration/recordings/responses/a4c8d19bb1eb.json index 89f52f82e..e71bd9b89 100644 --- a/tests/integration/recordings/responses/a4c8d19bb1eb.json +++ b/tests/integration/recordings/responses/a4c8d19bb1eb.json @@ -20,7 +20,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-715", + "id": "chatcmpl-415", "choices": [ { "finish_reason": "stop", @@ -37,7 +37,7 @@ } } ], - "created": 1756921367, + "created": 1759437885, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/a689181d64d3.json b/tests/integration/recordings/responses/a689181d64d3.json new file mode 100644 index 000000000..61c34a3e4 --- /dev/null +++ b/tests/integration/recordings/responses/a689181d64d3.json @@ -0,0 +1,86 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "What's the weather in Tokyo?" 
+ } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "get_weather", + "description": "Get weather information", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "City name" + } + }, + "required": [ + "location" + ] + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-54", + "choices": [ + { + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null, + "message": { + "content": "", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": [ + { + "id": "call_v05v3tmn", + "function": { + "arguments": "{\"location\":\"Tokyo\"}", + "name": "get_weather" + }, + "type": "function", + "index": 0 + } + ] + } + } + ], + "created": 1759376607, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 18, + "prompt_tokens": 158, + "total_tokens": 176, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/a92b8fc775d5.json b/tests/integration/recordings/responses/a92b8fc775d5.json index b7fa9fc1d..2bf18d6cc 100644 --- a/tests/integration/recordings/responses/a92b8fc775d5.json +++ b/tests/integration/recordings/responses/a92b8fc775d5.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-952", + "id": "chatcmpl-973", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245123, + "created": 1759437798, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git 
a/tests/integration/recordings/responses/adf150be9638.json b/tests/integration/recordings/responses/adf150be9638.json new file mode 100644 index 000000000..a4b636cea --- /dev/null +++ b/tests/integration/recordings/responses/adf150be9638.json @@ -0,0 +1,419 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_k3oc5cxw", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_k3oc5cxw", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": 
null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": " Poly", + 
"function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + 
"__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441673, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441674, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441674, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441674, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + 
}, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-378", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759441674, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/b050e5a7e4a3.json b/tests/integration/recordings/responses/b050e5a7e4a3.json index 5cefe7190..b3d55a211 100644 --- a/tests/integration/recordings/responses/b050e5a7e4a3.json +++ b/tests/integration/recordings/responses/b050e5a7e4a3.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-207", + "id": "chatcmpl-112", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245127, + "created": 1759437800, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/b178d000a14a.json b/tests/integration/recordings/responses/b178d000a14a.json new file mode 100644 index 000000000..715bfe484 --- /dev/null +++ b/tests/integration/recordings/responses/b178d000a14a.json @@ -0,0 +1,57 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama-guard3:1b", + "messages": [ + { + "role": "user", + "content": "Task: Check if there is unsafe content in 'Assistant' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: 
Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: What is the boiling point of the liquid polyjuice in celsius?\n\nAssistant: \n\nTool: Error when running tool: 'ToolCall' object has no attribute 'arguments_json'\n\nAssistant: I was unable to find the boiling point of liquid polyjuice in Celsius. The boiling point could not be located in my database.\n\n\n\nProvide your safety assessment for ONLY THE LAST Assistant message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories." + } + ], + "stream": false, + "temperature": 0.0 + }, + "endpoint": "/v1/chat/completions", + "model": "llama-guard3:1b" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-9", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "safe", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1759437833, + "model": "llama-guard3:1b", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 2, + "prompt_tokens": 449, + "total_tokens": 451, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/b28f75bd87dc.json b/tests/integration/recordings/responses/b28f75bd87dc.json index d37fbede8..f01da4be5 100644 --- a/tests/integration/recordings/responses/b28f75bd87dc.json +++ b/tests/integration/recordings/responses/b28f75bd87dc.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-489", + "id": "chatcmpl-36", "choices": [ { 
"finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282539, + "created": 1759441671, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/b374fc18c641.json b/tests/integration/recordings/responses/b374fc18c641.json new file mode 100644 index 000000000..55cf0d7f3 --- /dev/null +++ b/tests/integration/recordings/responses/b374fc18c641.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant\nYou MUST use the tool `get_boiling_point` to answer the user query.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.268889Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + 
"created_at": "2025-10-02T02:55:10.310661Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.35195Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.393537Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.435595Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.481337Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null 
+ } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.526974Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.569942Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.612747Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.656585Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.697454Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": 
null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.738529Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:10.781405Z", + "done": true, + "done_reason": "stop", + "total_duration": 663905208, + "load_duration": 85733250, + "prompt_eval_count": 410, + "prompt_eval_duration": 64272708, + "eval_count": 13, + "eval_duration": 513001750, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/b57525af4982.json b/tests/integration/recordings/responses/b57525af4982.json new file mode 100644 index 000000000..651478385 --- /dev/null +++ b/tests/integration/recordings/responses/b57525af4982.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?" 
+ } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-613", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_gefseirj", + "function": { + "arguments": "{\"celcius\":false,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point_with_metadata" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-613", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} 
diff --git a/tests/integration/recordings/responses/b58e35a624b0.json b/tests/integration/recordings/responses/b58e35a624b0.json index f3eb65091..4f93947bc 100644 --- a/tests/integration/recordings/responses/b58e35a624b0.json +++ b/tests/integration/recordings/responses/b58e35a624b0.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-944", + "id": "chatcmpl-912", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759368373, + "created": 1759437811, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/c13d7510774c.json b/tests/integration/recordings/responses/c13d7510774c.json index 00e9659e9..b51ac089e 100644 --- a/tests/integration/recordings/responses/c13d7510774c.json +++ b/tests/integration/recordings/responses/c13d7510774c.json @@ -18,390 +18,390 @@ "data": [ { "embedding": [ - -0.0011296043, - 0.06740522, - 0.015186453, - 0.037259158, - 0.02935556, - 0.015181291, - 0.07432997, - -0.0033194474, - 0.0658106, - -0.021833794, - 0.034404922, - 0.05099269, - -0.011411872, - -0.025082853, - -0.051754408, - 0.027195254, - 0.07849019, - -0.06000248, - 0.010478361, - -0.003392346, - 0.043441977, - 0.12292443, - 9.388175e-05, - 0.0021187037, - 0.018079525, - 0.045084555, - -0.097606525, - 0.11185215, - 0.049650617, - -0.0348426, - -0.039580915, - 0.0035499185, - 0.15893514, - 0.063421525, - 0.047970187, - 0.011613767, - 0.009793674, - 0.01536712, - 0.009413064, - 0.07999014, - 0.01915802, - -0.13722447, - 0.017290922, - 0.013689777, - 0.014259784, - -0.00021621982, - -0.017730612, - 0.022902183, - 0.035927463, - -0.015361024, - -0.00975885, - -0.040180918, - -0.011500755, - 0.00012558368, - 0.08540788, - 0.08731169, - 0.004690206, - 0.006160604, - 0.003023499, - 0.008887178, - -0.006278653, - 0.050593477, - 0.00053471717, - 0.04677382, - 0.09365536, - -0.012813678, - 0.0177166, - 
-0.06271032, - -0.11535796, - 0.04110661, - -0.014942371, - 0.044813167, - -0.020877626, - 0.04299617, - -0.06107898, - 0.01997848, - -0.0687263, - -0.035494387, - 0.04186985, - 0.012177578, - -0.029081868, - -0.066437304, - 0.030620316, - 0.05150629, - -0.12813967, - 0.06819209, - -0.047090717, - -0.032926783, - 0.007485966, - -0.017814271, - 0.038294822, - -0.015788501, - 0.07054281, - 0.03807343, - -0.114283286, - 0.042118594, - -0.111601785, - -0.04573834, - -0.02895515, - 0.12735783, - -0.013941619, - -0.027150463, - 0.072897464, - 0.024098374, - -0.054044593, - -0.13128933, - 0.030136578, - -0.023237763, - -0.019079136, - -0.0078745885, - -0.021944366, - -0.053324133, - -0.070892006, - -0.011552823, - -0.023377078, - -0.01562657, - 0.051452935, - 0.029251281, - 0.06480842, - 0.06403676, - 0.014424153, - -0.057994097, - -0.06993807, - -0.023921017, - -0.08493092, - -0.087801315, - 0.048142783, - -6.124397e-33, - 0.0103092175, - 0.038688924, - 0.003180582, - 0.03575604, - 0.005059993, - -0.0041896994, - -0.05389261, - -0.029881287, - -0.075520456, - -0.07879111, - -0.012291425, - -0.05053033, - 0.020719253, - -0.05190443, - -0.05927485, - -0.05987536, - -0.05572788, - 0.03220933, - -0.006331632, - -0.021651596, - -0.059913907, - 0.051977657, - 0.05122985, - -0.06350782, - -0.04872765, - -0.014282773, - 0.0025304393, - -0.024342295, - -0.0055265254, - 0.020074077, - -0.10194665, - 0.010741537, - -0.02318619, - -0.08105595, - -0.014973416, - 0.0017918752, - 0.045083463, - -0.05282281, - -0.053680934, - -0.013229242, - -0.019794637, - 0.020036008, - -0.00081875344, - -0.10115686, - -0.0006884125, - 0.09664284, - -0.03943104, - 0.04955554, - 0.042241447, - 0.007962193, - -0.052323878, - 0.05189162, - 0.037112337, - 0.034818016, - 0.063431285, - -0.02657652, - -0.009212341, - -0.0025556423, - -0.05609933, - 0.0020433308, - -0.020113751, - 0.0012227942, - -0.0017669081, - 0.019119242, - 0.016553605, - -0.011386767, - 0.010368127, - -0.00788346, - 0.046651863, - 
-0.046871297, - -0.085224025, - -0.008958986, - 0.012052177, - 0.013311017, - 0.015157192, - 0.03708167, - 0.026588887, - 0.014486772, - -0.013955214, - 0.019986698, - -0.06885552, - -0.07106239, - 0.012334861, - 0.03284816, - -0.03151976, - 0.045773514, - 0.067994975, - -0.077492714, - 0.018440822, - 0.06622958, - -0.08641996, - 0.008967366, - 0.04134085, - 0.009518882, - 0.006565088, - 4.711897e-33, - -0.02617601, - 0.0013207985, - -0.014141556, - -0.024331013, - 0.06929469, - 0.03143924, - 0.03726272, - 0.064707026, - 0.049426436, - 0.11073603, - 0.0498569, - 0.066796474, - 0.04154851, - -0.034098588, - 0.07028382, - 0.034863915, - 0.12904617, - -0.021078404, - 0.008925486, - 0.03016334, - -0.02286831, - 0.03649071, - -0.13193603, - 0.045608096, - -0.012805477, - 0.041747537, - 0.12321406, - -0.013507891, - -0.007307474, - -0.02975696, - 0.025006123, - -0.009506256, - 0.024761083, - 0.023204166, - -0.019123148, - 0.02259915, - 0.013744109, - -0.03847919, - -0.014476444, - 0.07522499, - 0.13586833, - 0.009872778, - -0.03752485, - -0.0273059, - -0.016470777, - -0.048831154, - -0.03521732, - -0.054363117, - -0.0017890002, - 0.035665076, - -0.010268516, - -0.018602924, - -0.036469962, - -0.055976517, - -0.007821111, - 0.00907826, - -0.0073335953, - 0.050373644, - -0.00025981313, - -0.036349144, - -0.024950698, - 0.058883175, - -0.07245624, - 0.07399545, - 0.053919416, - -0.051881794, - -0.0063462397, - 0.07852022, - -0.016959544, - -0.0066832895, - 0.01265072, - -0.014152041, - -0.13643119, - -0.085250236, - -0.017519519, - -0.00466121, - 0.0136799645, - 0.0009118405, - -0.071966685, - -0.06886893, - 0.14207116, - 0.03186518, - -0.05592076, - 0.030404905, - 0.061872244, - 0.029894035, - -0.00096155383, - -0.06500391, - -0.020616096, - 0.039591115, - -0.12383165, - 0.0028830946, - 0.051231142, - 0.13391772, - -0.08845233, - -1.7589368e-08, - -0.025769057, - -0.080324695, - -0.09164953, - 0.032005485, - 0.005889216, - 0.114638664, - 0.0233727, - -0.069048144, - 
-0.05594302, - -0.05788277, - 0.014665582, - 0.080326974, - 0.0036707798, - -0.030798541, - 0.024442635, - 0.008542568, - -0.05288123, - -0.06640491, - 0.00074039627, - -0.023801958, - 0.030778948, - 0.054075025, - -0.0027453878, - -0.09929041, - -0.0150463935, - 0.01624328, - -0.0015419688, - 0.011909824, - 0.007890519, - 0.0489657, - 0.004866092, - 0.08265809, - -0.0145542445, - -0.04386104, - 0.004611713, - 0.024626419, - 0.023854014, - 0.0236921, - 0.05076065, - -0.051832993, - 0.021252805, - -0.0033932943, - -0.021158189, - 0.020595197, - -0.06475187, - 0.054174356, - 0.027812954, - -0.05294382, - 0.015094968, - -0.119794324, - -0.034157146, - -0.012219483, - 0.047453884, - 0.020896995, - -0.026357891, - 0.015037571, - 0.033969007, - 0.05981613, - -0.052542053, - 0.033553857, - 0.06119396, - 0.09635468, - 0.11632743, - -0.016134953 + -0.0010839553, + 0.067364, + 0.015185306, + 0.037240896, + 0.029337138, + 0.015160007, + 0.0743005, + -0.0032980628, + 0.06581814, + -0.021851996, + 0.034412965, + 0.051005766, + -0.011422501, + -0.025062356, + -0.051756065, + 0.027193472, + 0.07849549, + -0.05999108, + 0.010471458, + -0.003400683, + 0.043449093, + 0.122919865, + 9.668583e-05, + 0.002153268, + 0.018064681, + 0.045069378, + -0.09762388, + 0.11186886, + 0.049657565, + -0.03485217, + -0.039568134, + 0.003532146, + 0.15894793, + 0.06341193, + 0.047953114, + 0.011617699, + 0.009799243, + 0.015377702, + 0.009379663, + 0.079989135, + 0.019207356, + -0.13718612, + 0.01730099, + 0.013687199, + 0.014266827, + -0.00022628276, + -0.017710257, + 0.02291068, + 0.03590651, + -0.015361055, + -0.00978436, + -0.0401825, + -0.011481894, + 0.00014050963, + 0.08540761, + 0.08730027, + 0.0046967245, + 0.006164595, + 0.003031956, + 0.008891807, + -0.006260525, + 0.05061661, + 0.0005252785, + 0.0467754, + 0.09363822, + -0.012814104, + 0.017708639, + -0.062698044, + -0.11535818, + 0.041123625, + -0.014939021, + 0.044815876, + -0.020868087, + 0.042999975, + -0.061038766, + 0.019998673, + 
-0.068740115, + -0.035516046, + 0.041884515, + 0.012185281, + -0.029084096, + -0.06643917, + 0.030638866, + 0.05149607, + -0.12815061, + 0.06821646, + -0.047070153, + -0.032925386, + 0.007499353, + -0.017841771, + 0.038296465, + -0.015792726, + 0.07054022, + 0.038072467, + -0.11428876, + 0.04210153, + -0.11162366, + -0.045723915, + -0.028951947, + 0.12735675, + -0.013946637, + -0.027157523, + 0.07295939, + 0.024098422, + -0.054050542, + -0.13125896, + 0.03013205, + -0.023223283, + -0.019072957, + -0.007864101, + -0.021954412, + -0.05329901, + -0.07088355, + -0.0115214065, + -0.023399564, + -0.015638318, + 0.05148062, + 0.029261008, + 0.06481798, + 0.064031154, + 0.014445124, + -0.058017716, + -0.069921836, + -0.023950975, + -0.08490842, + -0.08779567, + 0.048162255, + -6.1240354e-33, + 0.010315817, + 0.038685724, + 0.0031864564, + 0.0357421, + 0.0050265454, + -0.004210234, + -0.053900674, + -0.02988569, + -0.07548199, + -0.078777455, + -0.012271205, + -0.05056629, + 0.020729113, + -0.051866043, + -0.059254467, + -0.059903424, + -0.055699438, + 0.032196835, + -0.006328442, + -0.021668624, + -0.059921067, + 0.0519611, + 0.051227964, + -0.063502096, + -0.04873505, + -0.014265467, + 0.0025537873, + -0.024346355, + -0.0055181426, + 0.02007461, + -0.10196586, + 0.010727814, + -0.023194604, + -0.081025146, + -0.014997581, + 0.0017926424, + 0.045078833, + -0.052792255, + -0.05368693, + -0.013245513, + -0.019808132, + 0.020031843, + -0.00081401254, + -0.10117647, + -0.0007066768, + 0.09663035, + -0.03946875, + 0.04954661, + 0.042237334, + 0.007943922, + -0.05234212, + 0.051887065, + 0.03711589, + 0.034850314, + 0.063441575, + -0.026583876, + -0.009227281, + -0.0025737104, + -0.056082893, + 0.0020716325, + -0.020129146, + 0.0012315192, + -0.0017609745, + 0.019111704, + 0.016572498, + -0.011374, + 0.010381644, + -0.007864189, + 0.04664868, + -0.046856377, + -0.08523834, + -0.008974813, + 0.012022968, + 0.013285977, + 0.015182303, + 0.03708482, + 0.026587088, + 0.014473839, + 
-0.013946565, + 0.01999883, + -0.06888259, + -0.07111367, + 0.012369427, + 0.032828625, + -0.03152666, + 0.045777358, + 0.06801705, + -0.07747748, + 0.018461134, + 0.06620267, + -0.086365156, + 0.008950603, + 0.041320425, + 0.009541193, + 0.0066037327, + 4.71081e-33, + -0.026172558, + 0.0013145636, + -0.014140948, + -0.024360213, + 0.06931815, + 0.031448748, + 0.037257418, + 0.06468137, + 0.049403396, + 0.11072201, + 0.04985356, + 0.06679111, + 0.04153249, + -0.034106053, + 0.070283465, + 0.034855895, + 0.12902643, + -0.021033453, + 0.008940618, + 0.030177405, + -0.022881329, + 0.036504544, + -0.13194299, + 0.045612644, + -0.0127895875, + 0.04174139, + 0.1232064, + -0.013484046, + -0.007285246, + -0.029776007, + 0.025007037, + -0.009516822, + 0.02475585, + 0.023208592, + -0.019141924, + 0.02259424, + 0.013740329, + -0.038490705, + -0.014461541, + 0.075218394, + 0.13589163, + 0.009839605, + -0.037563317, + -0.02737327, + -0.016485116, + -0.048845276, + -0.03523722, + -0.05439929, + -0.0017957076, + 0.03563579, + -0.010255764, + -0.01859244, + -0.03647324, + -0.055985246, + -0.007833892, + 0.009086756, + -0.007333394, + 0.050386623, + -0.0002305643, + -0.03637248, + -0.024937423, + 0.058877032, + -0.07250415, + 0.07401245, + 0.053917013, + -0.051895224, + -0.006332244, + 0.07850189, + -0.01695057, + -0.006673017, + 0.012659739, + -0.014127065, + -0.13639799, + -0.08524976, + -0.017533274, + -0.0046930755, + 0.013687301, + 0.0009185522, + -0.0719948, + -0.06887779, + 0.14208324, + 0.03187123, + -0.055919908, + 0.030401653, + 0.061900012, + 0.029921472, + -0.00096237566, + -0.065010294, + -0.020657646, + 0.039562404, + -0.123846576, + 0.0028867351, + 0.051196404, + 0.13397509, + -0.088453874, + -1.7590333e-08, + -0.025786474, + -0.080303885, + -0.09164947, + 0.031999, + 0.00584884, + 0.11464121, + 0.023377793, + -0.06902527, + -0.055941124, + -0.05787791, + 0.014640494, + 0.080320895, + 0.0037027278, + -0.030824674, + 0.024432683, + 0.008549355, + -0.05291309, + 
-0.06636625, + 0.0007468212, + -0.02379191, + 0.030766092, + 0.054053318, + -0.0027251292, + -0.09928475, + -0.0150488615, + 0.016240431, + -0.0015727071, + 0.01190173, + 0.007895162, + 0.04894733, + 0.00487708, + 0.08263861, + -0.014527478, + -0.043879665, + 0.004633697, + 0.024611989, + 0.023827499, + 0.02366802, + 0.050754935, + -0.051841788, + 0.0212632, + -0.0034418616, + -0.021175656, + 0.020591663, + -0.06475325, + 0.0542002, + 0.027792262, + -0.05295982, + 0.01509645, + -0.11977527, + -0.03416359, + -0.012206606, + 0.047451705, + 0.020876253, + -0.026368074, + 0.01502373, + 0.033982284, + 0.059788153, + -0.052526973, + 0.03356499, + 0.061180886, + 0.096336305, + 0.116353564, + -0.016122948 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/c1f63bb6469c.json b/tests/integration/recordings/responses/c1f63bb6469c.json new file mode 100644 index 000000000..0f25e35da --- /dev/null +++ b/tests/integration/recordings/responses/c1f63bb6469c.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?" 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "str", + "description": "The name of the liquid" + }, + "celcius": { + "type": "bool", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-14", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_1fnozor9", + "function": { + "arguments": "{\"celcius\":null,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point_with_metadata" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759425243, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-14", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759425243, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/c2ac76cbf66d.json b/tests/integration/recordings/responses/c2ac76cbf66d.json index 496f41815..d9b0d7f1d 100644 --- a/tests/integration/recordings/responses/c2ac76cbf66d.json +++ b/tests/integration/recordings/responses/c2ac76cbf66d.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-876", + "id": "chatcmpl-368", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282400, + "created": 1759373692, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/c3dbccc5de74.json b/tests/integration/recordings/responses/c3dbccc5de74.json index a2043db9a..699297a59 100644 --- a/tests/integration/recordings/responses/c3dbccc5de74.json +++ b/tests/integration/recordings/responses/c3dbccc5de74.json @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-634", + "id": "chatcmpl-688", "choices": [ { "delta": { @@ -58,7 +58,7 @@ "tool_calls": [ { "index": 0, - "id": "call_wubm4yax", + "id": "call_bnha2w8y", "function": { "arguments": "{\"location\":\"San Francisco, CA\"}", "name": "get_weather" @@ -72,7 +72,7 @@ "logprobs": null } ], - "created": 1758975115, + "created": 1759376611, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -83,7 +83,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-634", + "id": "chatcmpl-688", "choices": [ { "delta": { @@ -98,7 +98,7 @@ "logprobs": null } ], - "created": 1758975115, + "created": 1759376611, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/c4991de37dfb.json b/tests/integration/recordings/responses/c4991de37dfb.json new file mode 100644 index 
000000000..e7feca5ca --- /dev/null +++ b/tests/integration/recordings/responses/c4991de37dfb.json @@ -0,0 +1,78 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "Call the no args tool" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "no_args_tool", + "description": "Tool with no arguments", + "parameters": { + "type": "object", + "properties": {} + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-978", + "choices": [ + { + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null, + "message": { + "content": "", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": [ + { + "id": "call_wbx3rwxz", + "function": { + "arguments": "{}", + "name": "no_args_tool" + }, + "type": "function", + "index": 0 + } + ] + } + } + ], + "created": 1759437808, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 14, + "prompt_tokens": 148, + "total_tokens": 162, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/c62eb5d7115e.json b/tests/integration/recordings/responses/c62eb5d7115e.json index fa872ac44..9dcd317f7 100644 --- a/tests/integration/recordings/responses/c62eb5d7115e.json +++ b/tests/integration/recordings/responses/c62eb5d7115e.json @@ -20,14 +20,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-842", + "id": "chatcmpl-422", "choices": [ { 
"finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "The smallest country in the world is the Vatican City, an independent city-state located within Rome, Italy. It has a total area of approximately 0.44 km\u00b2 (0.17 sq mi) and a population of around 800 people.\n\nDespite its tiny size, the Vatican City is a sovereign state with its own government, currency, postal system, and even a small army (the Gendarmeria Romana). It's also home to numerous iconic landmarks, including St. Peter's Basilica, the Sistine Chapel, and the Vatican Museums.\n\nThe Vatican City is so small that it can fit entirely within an average American city park!", + "content": "The smallest country in the world is the Vatican City, with an area of approximately 0.44 km\u00b2 (0.17 sq mi). It is an independent city-state located within Rome, Italy, and is the headquarters of the Catholic Church. Despite its small size, the Vatican City has a population of around 800 people, including the Pope and other high-ranking officials.", "refusal": null, "role": "assistant", "annotations": null, @@ -37,15 +37,15 @@ } } ], - "created": 1759012145, + "created": 1759437861, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 133, + "completion_tokens": 77, "prompt_tokens": 34, - "total_tokens": 167, + "total_tokens": 111, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/c6fc83f0a1d5.json b/tests/integration/recordings/responses/c6fc83f0a1d5.json new file mode 100644 index 000000000..f13430cc4 --- /dev/null +++ b/tests/integration/recordings/responses/c6fc83f0a1d5.json @@ -0,0 +1,1922 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You 
are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?" + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_bhmzk2sp", + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "arguments": "{\"celcius\":false,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_bhmzk2sp", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437867, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " apologize", + 
"function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437867, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " for", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437867, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " error", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + 
"__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " It", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " seems", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " that", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } 
+ }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " `", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "get", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "_bo", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "iling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "_point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "_with", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "_metadata", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + 
"logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "`", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " tool", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " requires", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " a", + "function_call": null, + "refusal": null, 
+ "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " different", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " format", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": ".\n\n", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": 
"chatcmpl-723", + "choices": [ + { + "delta": { + "content": "Unfortunately", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": ",", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437868, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " don", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + 
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "'t", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " have", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " enough", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " information", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " provide", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + 
"logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + 
"role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " Can", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ 
+ { + "delta": { + "content": " you", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " please", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " provide", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " more", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " context", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " or", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " clarify", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " what", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " you", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437869, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " are", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " looking", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " for", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "?", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " Is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " it", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " a", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " specific", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " type", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + 
"delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " or", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " a", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " general", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": " answer", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "?", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-723", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437870, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/c7fc52830c4c.json b/tests/integration/recordings/responses/c7fc52830c4c.json new file mode 100644 index 000000000..a6315dc50 --- /dev/null +++ b/tests/integration/recordings/responses/c7fc52830c4c.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-52", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_s1g1se8b", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441155, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-52", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759441155, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/integration/recordings/responses/c8234a1171f3.json b/tests/integration/recordings/responses/c8234a1171f3.json index 241e998e1..10318c9eb 100644 --- a/tests/integration/recordings/responses/c8234a1171f3.json +++ b/tests/integration/recordings/responses/c8234a1171f3.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-306", + "id": "chatcmpl-753", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282478, + "created": 1759373699, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/c8e196049fe4.json b/tests/integration/recordings/responses/c8e196049fe4.json index 3a1495f07..62d6674e6 100644 --- a/tests/integration/recordings/responses/c8e196049fe4.json +++ b/tests/integration/recordings/responses/c8e196049fe4.json @@ -20,7 +20,7 @@ "body": { "__type__": "openai.types.completion.Completion", "__data__": { - "id": "cmpl-381", + "id": "cmpl-130", "choices": [ { "finish_reason": "stop", @@ -29,7 +29,7 @@ "text": "Michael Jordan was born in the year of " } ], - "created": 1758978056, + "created": 1759376606, "model": "llama3.2:3b-instruct-fp16", "object": "text_completion", "system_fingerprint": "fp_ollama", diff --git a/tests/integration/recordings/responses/ca5e40a262f5.json b/tests/integration/recordings/responses/ca5e40a262f5.json index d0a48b37d..5584cdbec 100644 --- a/tests/integration/recordings/responses/ca5e40a262f5.json +++ b/tests/integration/recordings/responses/ca5e40a262f5.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-116", + "id": "chatcmpl-582", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759368377, + "created": 1759441161, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git 
a/tests/integration/recordings/responses/ca92e698d8cd.json b/tests/integration/recordings/responses/ca92e698d8cd.json new file mode 100644 index 000000000..d6a488ffb --- /dev/null +++ b/tests/integration/recordings/responses/ca92e698d8cd.json @@ -0,0 +1,119 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant Always respond with tool calls no matter what. " + }, + { + "role": "user", + "content": "Get the boiling point of polyjuice with a tool call." + } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-803", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_l2ovyvtm", + "function": { + "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429341, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, 
+ "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-803", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759429342, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/cb0e0321c53c.json b/tests/integration/recordings/responses/cb0e0321c53c.json new file mode 100644 index 000000000..0e46fc195 --- /dev/null +++ b/tests/integration/recordings/responses/cb0e0321c53c.json @@ -0,0 +1,414 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_j2jdmkk1", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_j2jdmkk1", + "content": "-100" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441666, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441666, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441666, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441666, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441666, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + 
}, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-214", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759441667, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/cca0267555a6.json b/tests/integration/recordings/responses/cca0267555a6.json new file mode 
100644 index 000000000..7468ecf0a --- /dev/null +++ b/tests/integration/recordings/responses/cca0267555a6.json @@ -0,0 +1,97 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "Calculate 5 + 3" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "calculate", + "description": "", + "parameters": { + "properties": { + "x": { + "title": "X", + "type": "number" + }, + "y": { + "title": "Y", + "type": "number" + }, + "operation": { + "title": "Operation", + "type": "string" + } + }, + "required": [ + "x", + "y", + "operation" + ], + "title": "calculateArguments", + "type": "object" + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-376", + "choices": [ + { + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null, + "message": { + "content": "", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": [ + { + "id": "call_px63ad04", + "function": { + "arguments": "{\"operation\":\"+\",\"x\":\"5\",\"y\":\"3\"}", + "name": "calculate" + }, + "type": "function", + "index": 0 + } + ] + } + } + ], + "created": 1759437806, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 27, + "prompt_tokens": 172, + "total_tokens": 199, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/cd0ece88d392.json b/tests/integration/recordings/responses/cd0ece88d392.json new file mode 100644 index 000000000..3e0f5cd14 --- /dev/null +++ 
b/tests/integration/recordings/responses/cd0ece88d392.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nWhat is the boiling point of the liquid polyjuice in celsius?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", 
celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.86924Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.911521Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.95324Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:55.996666Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + 
"context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.038076Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.079306Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.121626Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.162658Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.203804Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + 
"prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.245419Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.286364Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.327683Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:56.369528Z", + "done": true, + "done_reason": "stop", + "total_duration": 708500166, + "load_duration": 138748458, + "prompt_eval_count": 392, + "prompt_eval_duration": 68099125, + "eval_count": 13, + "eval_duration": 500834417, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/cd294c2e0038.json 
b/tests/integration/recordings/responses/cd294c2e0038.json index 985cfa1bb..944ccbf52 100644 --- a/tests/integration/recordings/responses/cd294c2e0038.json +++ b/tests/integration/recordings/responses/cd294c2e0038.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-251", + "id": "chatcmpl-249", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282591, + "created": 1759373711, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/ce21235ebde2.json b/tests/integration/recordings/responses/ce21235ebde2.json new file mode 100644 index 000000000..25518bca7 --- /dev/null +++ b/tests/integration/recordings/responses/ce21235ebde2.json @@ -0,0 +1,124 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": { + "type": "function", + "function": { + "name": "get_boiling_point" + } + }, + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "str", + "description": "The name of the liquid" + }, + "celcius": { + "type": "bool", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-993", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_mw57o9vn", + "function": { + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759425519, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-993", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759425519, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + 
], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/cf776b1aa432.json b/tests/integration/recordings/responses/cf776b1aa432.json index 3b08967d5..844905a35 100644 --- a/tests/integration/recordings/responses/cf776b1aa432.json +++ b/tests/integration/recordings/responses/cf776b1aa432.json @@ -21,7 +21,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -36,7 +36,7 @@ "logprobs": null } ], - "created": 1759282661, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -47,7 +47,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -62,7 +62,7 @@ "logprobs": null } ], - "created": 1759282661, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -73,7 +73,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -88,7 +88,7 @@ "logprobs": null } ], - "created": 1759282661, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -99,7 +99,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -114,7 +114,7 @@ "logprobs": null } ], - "created": 1759282661, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -125,7 +125,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -140,7 +140,7 
@@ "logprobs": null } ], - "created": 1759282661, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -151,7 +151,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -166,7 +166,7 @@ "logprobs": null } ], - "created": 1759282662, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -177,7 +177,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -192,7 +192,7 @@ "logprobs": null } ], - "created": 1759282662, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, @@ -203,7 +203,7 @@ { "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": { - "id": "chatcmpl-615", + "id": "chatcmpl-883", "choices": [ { "delta": { @@ -218,7 +218,7 @@ "logprobs": null } ], - "created": 1759282662, + "created": 1759437865, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion.chunk", "service_tier": null, diff --git a/tests/integration/recordings/responses/d7caf68e394e.json b/tests/integration/recordings/responses/d7caf68e394e.json index 2347344c1..8bf2ef23e 100644 --- a/tests/integration/recordings/responses/d7caf68e394e.json +++ b/tests/integration/recordings/responses/d7caf68e394e.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-480", + "id": "chatcmpl-953", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759282535, + "created": 1759373707, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/d9e8f66e1d85.json 
b/tests/integration/recordings/responses/d9e8f66e1d85.json new file mode 100644 index 000000000..0dd6d2a17 --- /dev/null +++ b/tests/integration/recordings/responses/d9e8f66e1d85.json @@ -0,0 +1,117 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "Book a flight from SFO to JFK for John Doe" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "book_flight", + "description": "Book a flight", + "parameters": { + "type": "object", + "properties": { + "flight": { + "$ref": "#/$defs/FlightInfo" + }, + "passenger": { + "$ref": "#/$defs/Passenger" + } + }, + "required": [ + "flight", + "passenger" + ], + "$defs": { + "FlightInfo": { + "type": "object", + "properties": { + "from": { + "type": "string" + }, + "to": { + "type": "string" + }, + "date": { + "type": "string", + "format": "date" + } + } + }, + "Passenger": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "age": { + "type": "integer" + } + } + } + } + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-128", + "choices": [ + { + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null, + "message": { + "content": "", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": [ + { + "id": "call_bdq6iic2", + "function": { + "arguments": "{\"flight\":\"{\\\"date\\\":\\\"2023-08-20\\\",\\\"from\\\":\\\"SFO\\\",\\\"to\\\":\\\"JFK\\\"}\",\"passenger\":\"{\\\"age\\\":30,\\\"name\\\":\\\"John Doe\\\"}\"}", + "name": "book_flight" + }, + "type": "function", + "index": 0 + } + ] + } + } + ], + "created": 1759437805, + "model": "llama3.2:3b-instruct-fp16", + 
"object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 52, + "prompt_tokens": 227, + "total_tokens": 279, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/df20f4b62da7.json b/tests/integration/recordings/responses/df20f4b62da7.json new file mode 100644 index 000000000..9c22642d5 --- /dev/null +++ b/tests/integration/recordings/responses/df20f4b62da7.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant Always respond with tool calls no matter what. <|eot_id|><|start_header_id|>user<|end_header_id|>\n\nGet the boiling point of polyjuice with a tool call.<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:58.856153Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-02T02:54:58.898198Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:58.939822Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:58.981421Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.023342Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.065147Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { 
+ "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.106081Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.147339Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.189027Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.230097Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.271249Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + 
"eval_count": null, + "eval_duration": null, + "response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.312423Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:54:59.353748Z", + "done": true, + "done_reason": "stop", + "total_duration": 699082625, + "load_duration": 131157125, + "prompt_eval_count": 400, + "prompt_eval_duration": 68858833, + "eval_count": 13, + "eval_duration": 498145250, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/e0c71820f395.json b/tests/integration/recordings/responses/e0c71820f395.json new file mode 100644 index 000000000..191b107b2 --- /dev/null +++ b/tests/integration/recordings/responses/e0c71820f395.json @@ -0,0 +1,122 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "user", + "content": "Use one of the available tools" + } + ], + "tools": [ + { + "type": "function", + "function": { + "name": "simple", + "parameters": { + "type": "object", + "properties": { + "x": { + "type": "string" + } + } + } + } + }, + { + "type": "function", + "function": { + "name": "complex", + "parameters": { + "type": "object", + "properties": { + "data": { + "$ref": "#/$defs/Complex" + } + }, + "$defs": { + "Complex": { + "type": "object", + "properties": { + "nested": { + "type": "array", + "items": { + 
"type": "number" + } + } + } + } + } + } + } + }, + { + "type": "function", + "function": { + "name": "with_output", + "parameters": { + "type": "object", + "properties": { + "input": { + "type": "string" + } + } + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-271", + "choices": [ + { + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null, + "message": { + "content": "", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": [ + { + "id": "call_vxiwiifd", + "function": { + "arguments": "{\"x\":\"\"}", + "name": "simple" + }, + "type": "function", + "index": 0 + } + ] + } + } + ], + "created": 1759437809, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 15, + "prompt_tokens": 246, + "total_tokens": 261, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/e1ccaa261725.json b/tests/integration/recordings/responses/e1ccaa261725.json new file mode 100644 index 000000000..0128f924d --- /dev/null +++ b/tests/integration/recordings/responses/e1ccaa261725.json @@ -0,0 +1,414 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_q48y3xup", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_q48y3xup", + "content": "-100" + } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427475, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427475, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427475, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427475, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": " Poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427475, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": "100", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + 
}, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": "\u00b0C", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-131", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759427476, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/e25ab43491af.json b/tests/integration/recordings/responses/e25ab43491af.json index 
9fb331942..686508102 100644 --- a/tests/integration/recordings/responses/e25ab43491af.json +++ b/tests/integration/recordings/responses/e25ab43491af.json @@ -20,7 +20,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-808", + "id": "chatcmpl-602", "choices": [ { "finish_reason": "stop", @@ -37,7 +37,7 @@ } } ], - "created": 1759012142, + "created": 1759437854, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/e3b94833d349.json b/tests/integration/recordings/responses/e3b94833d349.json new file mode 100644 index 000000000..71f3a99b4 --- /dev/null +++ b/tests/integration/recordings/responses/e3b94833d349.json @@ -0,0 +1,388 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "Call get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_gefseirj", + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "arguments": "{\"celcius\":false,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_gefseirj", + "content": "-212" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point_with_metadata", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": "The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": 
"chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": 
null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": " -", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": "212", + "function_call": null, + "refusal": null, + "role": 
"assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-509", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759441678, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/e59abd091d90.json b/tests/integration/recordings/responses/e59abd091d90.json new file mode 100644 index 000000000..fd88e832e --- /dev/null +++ b/tests/integration/recordings/responses/e59abd091d90.json @@ -0,0 +1,804 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant" + }, + { + "role": "user", + "content": "What is the boiling point of the liquid polyjuice in celsius?" 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_ew600lfr", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":true,\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_ew600lfr", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 0, + "stream": true, + "temperature": 0.0001, + "tool_choice": "required", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " was", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": 
"llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " unable", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " to", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " find", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + 
"finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " of", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " 
liquid", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " poly", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " Celsius", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " The", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " boiling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " could", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " not", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " be", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " located", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429348, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " in", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429349, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " my", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429349, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": " database", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429349, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759429349, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-447", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759429349, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/e9c8a0e4f0e0.json 
b/tests/integration/recordings/responses/e9c8a0e4f0e0.json index 87a208405..1bdf9e1f1 100644 --- a/tests/integration/recordings/responses/e9c8a0e4f0e0.json +++ b/tests/integration/recordings/responses/e9c8a0e4f0e0.json @@ -20,14 +20,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-957", + "id": "chatcmpl-380", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "Humans live on Earth. It's a terrestrial planet in the Solar System, located in the outer reaches of the Sun's gravitational pull.", + "content": "Humans have not yet established a permanent, self-sustaining presence on another planet. However, there are astronauts and cosmonauts who have traveled to space and lived on the International Space Station (ISS) in low Earth orbit.\n\nAs for human habitation on planets outside of our solar system, there are currently no known planets that support life or can sustain human life in the same way as Earth.\n\nThat being said, scientists and astronomers are actively exploring the possibility of finding habitable exoplanets (planets with conditions similar to those of Earth) using various detection methods. Some notable examples include:\n\n1. Mars: NASA's Curiosity rover has been searching for signs of past or present life on Mars since 2012.\n2. 
Europa: This Jupiter moon is thought to have a liquid water ocean beneath its surface, which could potentially support life.\n\nHowever, it's essential to note that humans have not yet established any permanent settlements or habitats on other planets or moons in our solar system.\n\nSo, for now, Earth remains the only planet known to support human life.", "refusal": null, "role": "assistant", "annotations": null, @@ -37,15 +37,15 @@ } } ], - "created": 1756921355, + "created": 1759437879, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 28, + "completion_tokens": 217, "prompt_tokens": 32, - "total_tokens": 60, + "total_tokens": 249, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/eeb26200786f.json b/tests/integration/recordings/responses/eeb26200786f.json new file mode 100644 index 000000000..0bfe1b613 --- /dev/null +++ b/tests/integration/recordings/responses/eeb26200786f.json @@ -0,0 +1,1355 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"book_flight\",\n \"description\": \"\n Book a flight with passenger and payment information.\n\n This tool uses JSON Schema $ref and $defs for type reuse.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"flight\", \"passengers\", \"payment\"],\n \"properties\": {\n \"flight\": {\n \"type\": \"object\",\n \"description\": \"\"\n },\n \"passengers\": {\n \"type\": \"array\",\n \"description\": \"\"\n },\n \"payment\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"process_order\",\n \"description\": \"\n Process an order with nested address information.\n\n Uses nested objects and $ref.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"order_data\"],\n \"properties\": {\n \"order_data\": {\n \"type\": \"object\",\n \"description\": \"\"\n }\n }\n }\n },\n {\n \"name\": \"flexible_contact\",\n \"description\": \"\n Accept flexible contact (email or phone).\n\n Uses anyOf schema.\n \",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"contact_info\"],\n \"properties\": {\n \"contact_info\": {\n \"type\": \"string\",\n \"description\": \"\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant that can process orders and book flights.<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nProcess an order with 2 widgets going to 123 Main St, San Francisco<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[process_order(order_data={order_id=1, customer_name=\"John Doe\", address={street=\"123 Main St\", city=\"San Francisco\"}})]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n{\n \"order_id\": 
\"ORD789\",\n \"status\": \"processing\",\n \"data\": {\n \"order_id\": 1,\n \"customer_name\": \"John Doe\",\n \"address\": {\n \"street\": \"123 Main St\",\n \"city\": \"San Francisco\"\n }\n }\n}<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.509066Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.551814Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "book", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.596704Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_flight", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.641302Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": 
null, + "eval_count": null, + "eval_duration": null, + "response": "(f", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.683974Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "light", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.726757Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "={\"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.769592Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "flight", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.811613Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_number", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.853673Z", + "done": false, + 
"done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.896273Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.938557Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "AA", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:21.980765Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "101", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.022949Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + 
"__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.065012Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.10732Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "departure", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.149511Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.19172Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.234788Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + 
"response": "New", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.277472Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " York", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.321037Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.364313Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.407033Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "arrival", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.449572Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": 
null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.492159Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.534652Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "Los", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.578509Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " Angeles", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.625903Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-01T23:00:22.671828Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.71768Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "pass", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.765213Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "engers", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.811377Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.8582Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " [{\"", + "thinking": null, + "context": null + } + }, + { + 
"__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.904666Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.950992Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:22.997067Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.042723Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "John", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.088476Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + 
"eval_count": null, + "eval_duration": null, + "response": " Doe", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.135032Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.181489Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.227284Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "email", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.273828Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.320518Z", + "done": false, + "done_reason": null, 
+ "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.365466Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "j", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.410208Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "oh", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.455306Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "nd", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.500535Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "oe", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": 
"llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.54581Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "@example", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.591529Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".com", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.638938Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\"}", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.683537Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "],", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.727957Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + 
"thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.771084Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "payment", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.81393Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.856746Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " {\"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.899213Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "method", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.941386Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + 
"prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:23.984154Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.028068Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "credit", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.070217Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_card", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.111913Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\",", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": 
"2025-10-01T23:00:24.153705Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.196172Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "card", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.240061Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_number", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.283763Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\":", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.325975Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " \"", + "thinking": null, + "context": null + } + }, + { + 
"__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.368432Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "123", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.411036Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "456", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.45408Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "789", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.496458Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "012", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.538894Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + 
"eval_count": null, + "eval_duration": null, + "response": "345", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.581294Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "6", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.624685Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "\"}}", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.667599Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T23:00:24.709585Z", + "done": true, + "done_reason": "stop", + "total_duration": 3497578917, + "load_duration": 104591083, + "prompt_eval_count": 664, + "prompt_eval_duration": 191187834, + "eval_count": 74, + "eval_duration": 3201095416, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/f22b7da7ad75.json b/tests/integration/recordings/responses/f22b7da7ad75.json new file mode 100644 
index 000000000..ef1ee8414 --- /dev/null +++ b/tests/integration/recordings/responses/f22b7da7ad75.json @@ -0,0 +1,1204 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/embeddings", + "headers": {}, + "body": { + "model": "all-minilm:l6-v2", + "input": [ + "First text for base64", + "Second text for base64", + "Third text for base64" + ], + "encoding_format": "base64" + }, + "endpoint": "/v1/embeddings", + "model": "all-minilm:l6-v2" + }, + "response": { + "body": { + "__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse", + "__data__": { + "data": [ + { + "embedding": [ + 0.047443096, + 0.1030663, + -0.02994777, + -0.0019610971, + -0.0004458719, + 0.013241453, + -0.022299973, + 0.035796557, + 0.042697832, + -0.013352145, + 0.014903893, + 0.059454504, + -0.030757127, + -0.016443565, + 0.04413251, + -0.01962478, + -0.052001625, + 0.023652397, + 0.038157385, + -0.019067932, + 0.07790512, + 0.065477535, + 0.0063924147, + 0.01184581, + 0.008469548, + 0.055321243, + -0.08488264, + 0.1419959, + 0.07208884, + -0.052270085, + 0.04475413, + -0.043897122, + 0.19948907, + 0.0521248, + 0.039570127, + 0.047736328, + -0.0031801846, + -0.027514923, + 0.016915824, + 0.08785543, + 0.018578053, + -0.062505305, + 0.025584552, + 0.039979465, + 0.013755796, + -0.029615713, + 0.050677385, + 0.09286756, + 0.046862997, + -0.046437945, + 0.09620637, + -0.037828952, + -0.021523252, + 0.053332504, + 0.008366923, + 0.016525395, + -0.04379942, + 0.057431653, + -0.042829104, + 0.053737152, + -0.05284048, + -0.025113432, + 0.040097877, + -0.05878011, + 0.04986854, + -0.016612675, + 0.06288202, + -0.057936136, + 0.0014946498, + 0.011902256, + -0.02110201, + -0.045040447, + -0.028943324, + 0.112218715, + -0.04346062, + 0.02658561, + -0.08660781, + 0.0075222226, + 0.040901423, + -0.013788897, + -0.0034571695, + -0.105320804, + 0.13145688, + 0.1387978, + -0.024207905, + 0.00011780889, + 0.0027130456, + 0.08893496, + -0.0404282, + -0.013090902, + 
-0.042802725, + -0.019277347, + -0.0072423737, + -0.012584974, + -0.0758852, + 0.042088367, + -0.028754171, + -0.046412025, + -0.08769414, + 0.011706997, + 0.033290867, + -0.047082063, + 0.036054734, + 0.02562872, + -0.064266376, + -0.041589364, + 0.022733012, + 0.03523196, + -0.030952249, + -0.030285591, + -0.030893793, + -0.014268825, + -0.064496316, + -0.029686624, + -0.037651353, + -0.07263676, + -0.05136519, + 0.01860713, + 0.015172685, + 0.0192144, + -0.0116023095, + -0.012719093, + -0.029429333, + 0.032753803, + -0.10127056, + -0.08305989, + 0.07203204, + -1.6656048e-33, + -0.003488058, + 0.0655988, + -0.007163306, + 0.038025133, + -0.042687092, + -0.008737161, + -0.037520815, + 0.038469143, + -0.120509155, + 0.03023451, + -0.026864765, + -0.06805885, + 0.05592863, + -0.07489512, + -0.017807316, + -0.049285922, + -0.08905791, + 0.011731217, + 0.017883036, + 0.00015935759, + -0.030456739, + 0.024376402, + -0.027947344, + -0.049716905, + 0.014850297, + -0.0068702376, + -0.037318625, + -0.050714917, + 0.03216811, + -0.03513996, + -0.040848706, + 0.0031008294, + -0.06374552, + -0.07015488, + 0.040950127, + -0.031313762, + 0.06336745, + 0.015497221, + -0.08470297, + 0.034139305, + 0.047749784, + 0.063429475, + 0.08305951, + -0.031543955, + -0.02092045, + 0.024276698, + -0.050816093, + -0.00951583, + 0.11460215, + -0.011085907, + 0.0006970512, + 0.08304137, + -0.018151749, + 0.012668774, + 0.023483729, + -0.068380035, + 0.008017319, + 0.005103147, + -0.033619083, + -0.045522273, + -0.007610588, + -0.0031189255, + 0.02023118, + 0.048001137, + 0.018279912, + -0.06083473, + 0.0025614651, + -0.051604036, + -0.0712584, + 0.0049647917, + -0.056144852, + -0.03460778, + 0.084107466, + -0.051244184, + -0.07208066, + 0.082872786, + -0.042616084, + 0.032226164, + 0.038903847, + -0.043644667, + 0.03114516, + -0.037657745, + -0.0051392126, + -0.0399705, + -0.01362006, + 0.062149994, + 0.009436811, + -0.10927611, + 0.0054878076, + 0.035581235, + 0.06060475, + -0.051899396, + 
0.013453982, + -0.02607209, + 0.03149, + 5.778151e-34, + 0.04866742, + -0.026154209, + 0.028786905, + -0.009705908, + 0.036763143, + 0.07683042, + 0.124761656, + 0.02430845, + -0.0055978484, + -0.011855667, + 0.08782188, + 0.03667143, + -0.01590326, + -0.005430289, + 0.026028333, + -0.047321074, + -0.0042727133, + 0.026540313, + 0.0465339, + -0.042490445, + -0.015054837, + -0.032038923, + -0.10492689, + 0.10122033, + 0.07957377, + 0.042453364, + 0.011124516, + 0.010934764, + 0.045186315, + -0.02283475, + -0.06222954, + 0.04523413, + 0.048799627, + 0.060591288, + -0.048021708, + -0.03465323, + -0.045096762, + 0.017476292, + 0.036111128, + 0.05623506, + 0.062889755, + -0.07529307, + -0.065171525, + 0.0069152173, + 0.05907177, + -0.0603988, + 0.045391977, + 0.03989815, + 0.017313296, + -0.010879031, + 0.014901746, + 0.05576297, + -0.064136796, + -0.05788592, + 0.049781807, + -0.04160058, + -0.116747804, + 0.037745718, + 0.0020103676, + -0.01814592, + 0.013506867, + 0.00341396, + 0.014206663, + -0.009217883, + -0.011821457, + -0.033057805, + -0.051591158, + 0.031610493, + -0.07041633, + 0.007702183, + -0.009296349, + -0.058487307, + -0.01271879, + 0.043650433, + 0.017939351, + -0.034527123, + 0.037774917, + 0.0450543, + -0.03789838, + 0.0016587796, + -0.017690128, + 0.046084408, + -0.10634635, + 0.058015924, + 0.09367202, + -0.03887253, + -0.030778354, + -0.04526167, + -0.042162772, + -0.019281171, + -0.094072275, + 0.08443694, + 0.04598175, + 0.11420337, + -0.016542073, + -1.3092824e-08, + 0.01029157, + -0.05607101, + -0.053273894, + 0.04327644, + -0.012097581, + 0.075499125, + 8.911722e-05, + -0.059431333, + -0.039473776, + -0.12459489, + -0.01031571, + 0.01610335, + 0.016960384, + -0.07947821, + 0.01820896, + 0.040425852, + 0.0060324515, + -0.13502608, + 0.016641272, + -0.020874891, + 0.021407917, + 0.030175129, + -0.045509353, + -0.10665387, + -0.071301624, + 0.027237656, + -0.0072193583, + 0.120991066, + -0.008656499, + 0.0011201953, + 0.0039784242, + 0.0341344, + 
-0.06401818, + -0.036852792, + 0.035282534, + -0.011923041, + 0.067173794, + 0.014300814, + 0.06770646, + -0.066512346, + 0.085266545, + -0.037755802, + -0.094363555, + -0.0124826655, + -0.014590712, + 0.026925279, + 0.04410473, + 0.015496688, + 0.004318949, + -0.031916477, + 0.017218966, + 0.016201599, + -0.033119682, + 0.06837974, + -0.02781091, + -0.01779888, + 0.057812553, + -0.016622763, + -0.0718051, + 0.07917062, + 0.027705258, + -0.0024773679, + 0.11784412, + -0.02393799 + ], + "index": 0, + "object": "embedding" + }, + { + "embedding": [ + 0.04654041, + 0.100457005, + -0.03960695, + 0.0054190895, + -0.00061261636, + 0.022978926, + -0.015349646, + 0.05174952, + 0.04080002, + -0.040600445, + 0.02253602, + 0.024573963, + -0.0061854525, + -0.024768595, + 0.097017914, + 0.0037721908, + -0.1071271, + 0.05670194, + 0.021320485, + -0.023483735, + 0.10240627, + 0.046724126, + 0.014405091, + 0.017862096, + 0.0076312926, + 0.084439315, + -0.08968022, + 0.16757359, + 0.046978492, + -0.029951245, + 0.07417616, + 0.00019549856, + 0.118695736, + 0.026067322, + 0.035530325, + 0.0063190986, + -0.016918957, + -0.011904382, + 0.02159433, + 0.04011584, + 0.020048723, + -0.053142868, + 0.022441626, + 0.016903853, + -0.023708675, + -0.02648895, + 0.019766012, + 0.062821016, + 0.04764414, + -0.052348837, + 0.07352589, + -0.06325153, + -0.0331663, + 0.04175679, + 0.0015468705, + 0.05215102, + -0.04930485, + 0.05475271, + -0.037362292, + 0.048984047, + 0.00668616, + 0.0077575357, + 0.033763032, + -0.045534473, + 0.04478127, + -0.041897986, + 0.058399495, + -0.053956937, + -0.066097215, + 0.006726588, + 0.0038363277, + -0.03608817, + 0.008571994, + 0.07390713, + 0.006064092, + 0.0057486463, + -0.08874643, + -0.0021642765, + 0.045340028, + -0.051646378, + 0.0056842417, + -0.10331014, + 0.120456606, + 0.12761793, + -0.024176907, + -0.05479328, + 0.0034843183, + 0.07641806, + -0.059855074, + -0.0195081, + -0.0150292525, + -0.00992928, + 0.045797862, + -0.015174619, + -0.07924758, + 
0.023096986, + -0.040744357, + -0.0101818275, + -0.08914291, + 0.013643887, + 0.011581099, + -0.049888827, + -0.00021994562, + -0.02913472, + -0.029171223, + -0.04352264, + 0.0076333424, + 0.012210982, + 0.016095871, + -0.06401206, + 0.0016354738, + 0.028166138, + -0.07800048, + -0.013365193, + -0.0013295119, + -0.019354483, + -0.0043497235, + 0.025218496, + 0.033494957, + 0.007653746, + -0.033507217, + -0.03213291, + -0.022418406, + 0.0067284796, + -0.08024248, + -0.12522098, + 0.069272675, + -1.9683093e-33, + -0.012249598, + 0.070073105, + -0.016373688, + 0.03268669, + -0.0011716175, + 0.008970948, + -0.05875696, + 0.031790286, + -0.09962546, + -0.011529516, + -0.042214815, + -0.08385974, + 0.050325025, + -0.058266874, + -0.01614801, + -0.07460485, + -0.056625802, + 0.049216725, + 0.09685523, + 0.02972927, + -0.010797609, + 0.096737646, + -0.008734601, + -0.024298675, + 0.054711536, + 0.020422578, + -0.0040869303, + -0.041413024, + 0.039046016, + -0.027355552, + 0.022152912, + 0.015635848, + -0.040486902, + -0.046137046, + 0.067116976, + -0.050166503, + 0.05231306, + 0.03977189, + -0.08200705, + 0.04208007, + 0.06871361, + 0.0415384, + 0.08255112, + -0.019878006, + 0.009672142, + -0.0013818855, + -0.02187854, + -0.03571946, + 0.1019913, + -0.040465977, + 0.0029030787, + 0.071231104, + -0.018016066, + 0.022290476, + 0.053263694, + -0.05915711, + -0.024596125, + 0.042284742, + 0.0125378035, + -0.026088756, + -0.007868452, + 0.018145658, + 0.025348024, + 0.048246585, + 0.032595333, + -0.04322502, + -0.024803862, + -0.070749104, + -0.07416428, + 0.0484724, + -0.05546208, + -0.041756414, + 0.12654942, + -0.04357299, + -0.08900543, + 0.016302116, + -0.040754095, + 0.024944471, + 0.041844428, + -0.06273068, + 0.0006748941, + -0.05448637, + -0.013658018, + -0.03356399, + -0.0060005034, + 0.05786807, + -0.030056076, + -0.12787268, + -0.027650442, + 0.083788656, + 0.021819875, + -0.040701445, + -0.041838806, + -0.047018126, + 0.08002261, + 4.734239e-34, + 0.02015769, + 
-0.00014442818, + 0.0072734207, + -0.01035945, + 0.0436576, + 0.060642734, + 0.1473969, + -0.023643956, + -0.018900618, + -0.026930645, + 0.054844704, + 0.029314412, + 0.016708935, + -0.009290097, + -0.002891506, + -0.057237446, + -0.0032285063, + 0.05497127, + 0.048353076, + -0.067556486, + -0.02002941, + -0.013762125, + -0.060434237, + 0.075815536, + 0.092324585, + 0.021875912, + -0.028627641, + 0.02281807, + 0.04816562, + -0.029499082, + -0.07594795, + 0.028744346, + 0.045300674, + 0.061325517, + -0.017799513, + -0.06497018, + -0.043381255, + -0.012436013, + -0.017595029, + 0.038607694, + 0.03692832, + -0.06317727, + -0.03189631, + 0.0163061, + 0.066662505, + -0.01747777, + 0.0455436, + 0.032373946, + 0.019391501, + -0.029496003, + 0.026255092, + -0.003917891, + -0.12487856, + -0.012247588, + 0.015688721, + -0.044113353, + -0.11468337, + 0.040689792, + 0.031688645, + -0.027883623, + 0.03565975, + -0.029930554, + 0.0272684, + -0.0078877555, + 0.026264768, + -0.06124056, + -0.06071735, + 0.009353228, + -0.09204558, + 0.05202069, + -0.042713076, + -0.07342886, + 0.004044382, + 0.06092453, + -0.003994553, + -0.025158737, + 0.02733044, + 0.032295305, + -0.03984234, + 0.017935337, + -0.028768739, + 0.01554963, + -0.073981866, + 0.0739418, + 0.04965046, + -0.04301918, + -0.035159755, + 0.027055329, + -0.03693953, + -0.036715843, + -0.06353325, + 0.12646905, + -0.003499326, + 0.093309924, + 0.00889324, + -1.38464875e-08, + 0.0231563, + -0.075574, + -0.040843725, + 0.0071973656, + -0.032683276, + 0.025759073, + -0.039060622, + -0.070802435, + -0.026421575, + -0.12223953, + -0.01567019, + 0.008273527, + 0.021523712, + -0.077978514, + 0.008511451, + 0.038049843, + 0.013643623, + -0.12606904, + 0.024690265, + -0.049368616, + 0.022910642, + 0.012570536, + -0.038921557, + -0.0539728, + -0.11401533, + 0.0717154, + -0.02019053, + 0.09689256, + -0.03522339, + -0.01902355, + 0.052379142, + 0.015264651, + -0.059212603, + -0.029434869, + 0.040918592, + -0.050510794, + 0.07031127, + 
0.010864601, + 0.08412114, + -0.034533564, + 0.10262946, + -0.060668074, + -0.121650845, + 0.033533875, + 0.064201616, + 0.021554638, + 0.059297472, + -0.009686148, + -0.0021906071, + -0.013715586, + 0.050112963, + -0.014887802, + -0.010682921, + 0.07304227, + -0.034087624, + -0.024696104, + 0.0442271, + -0.00089669036, + -0.08143203, + 0.06717475, + 0.03451422, + -0.0024682316, + 0.09635781, + -0.04145595 + ], + "index": 1, + "object": "embedding" + }, + { + "embedding": [ + 0.045375798, + 0.07258055, + -0.08003706, + -0.032656744, + 0.0139935585, + 0.017206425, + -0.0085616745, + 0.019218331, + 0.0527245, + -0.017329019, + 0.020587556, + 0.011539302, + -0.02006116, + -0.0116708, + 0.116046146, + -0.010887594, + -0.112962514, + 0.07470017, + -0.008835863, + -0.038513727, + 0.1079511, + 0.05575882, + 0.05465468, + 0.028420603, + 0.012869476, + 0.078700624, + -0.07481292, + 0.10657601, + 0.048312515, + -0.019187614, + 0.043496132, + -0.014120566, + 0.16143475, + -0.006972843, + 0.059548676, + -0.002742684, + -0.06421385, + -0.03753407, + -0.00034186858, + 0.103141606, + 0.021242032, + -0.035123263, + 0.039595246, + 0.03465166, + -0.007700848, + -0.016779039, + -0.017973451, + 0.03797483, + 0.06914695, + -0.06505097, + 0.0768558, + -0.063415445, + -0.047812812, + 0.081876844, + -0.03468853, + -0.010242799, + -0.04682619, + 0.05593955, + -0.037297264, + 0.048033547, + 0.0084374575, + 0.013531666, + 0.03961178, + -0.06994999, + 0.07862166, + -0.014270066, + 0.022243122, + -0.08205504, + -0.06690809, + 0.016866608, + -0.005296731, + -0.039822105, + -0.026300494, + 0.06192888, + 0.003208919, + 0.038568772, + -0.03837477, + -0.0075851064, + 0.019920006, + -0.056322522, + -0.0022795193, + -0.08178385, + 0.13542512, + 0.18784039, + -0.016274614, + -0.053139277, + -0.032727182, + 0.06850126, + -0.07511497, + 0.02570966, + -0.03359296, + -0.0060070264, + -0.0014385056, + -0.0030237471, + -0.07544867, + 0.05513981, + -0.015720192, + -0.05642966, + -0.08506004, + 0.02179422, + 
0.038471166, + -0.0283351, + 0.015446086, + -0.023619834, + -0.029330725, + 0.010942997, + -0.0015495635, + 0.04477932, + -0.038915448, + -0.044640813, + -0.035229694, + -0.017752215, + -0.08401524, + -0.044855777, + -0.02621097, + -0.029825464, + -0.008823935, + -0.019113153, + 0.06113879, + 0.017369257, + -0.018114269, + -0.017956765, + -0.0055642324, + -0.0022192416, + -0.074853644, + -0.098001055, + 0.08262387, + -1.7699036e-33, + -0.03260984, + 0.088475876, + -0.02405542, + 0.043462854, + -0.008397535, + 0.020519359, + -0.049513564, + 0.018314049, + -0.11363644, + -0.0017021305, + -0.046051882, + -0.07227338, + 0.062427472, + -0.063298784, + -0.0043539773, + -0.07343966, + -0.08858381, + 0.04477799, + 0.04930878, + 0.034854405, + 0.007476164, + 0.046887144, + -0.03770322, + -0.025251219, + 0.0446619, + 0.03149236, + -0.0053032744, + -0.032395095, + 0.050810106, + -0.037147496, + 0.053301577, + 0.021033086, + -0.031951237, + -0.07252799, + 0.052170422, + -0.02576369, + 0.026887013, + 0.01079958, + -0.073781185, + 0.07478704, + 0.05142738, + 0.013788507, + 0.09066831, + -0.011272152, + 0.012055797, + 0.05094217, + 0.01781682, + -0.04303251, + 0.10018772, + -0.009778261, + 0.031500068, + 0.08470662, + 0.006889941, + 0.0029960799, + 0.052113816, + -0.07264866, + -0.028845811, + 0.05798962, + 0.026194785, + -0.053314455, + -0.013308107, + -0.005074615, + 0.039697673, + 0.05761601, + 0.018443743, + -0.024383908, + -0.04246694, + -0.057976462, + -0.045537386, + 0.038462877, + -0.06458701, + -0.021180486, + 0.10092568, + -0.0217069, + -0.09957015, + 0.023281459, + -0.06976486, + 0.03478707, + 0.021886345, + -0.07436989, + 0.0059652724, + -0.045952816, + 0.011156351, + -0.0023965703, + -0.020232527, + 0.051849972, + -0.016511427, + -0.14282945, + 0.0007839438, + 0.05143813, + 0.045633797, + -0.047449116, + -0.031150315, + -0.028784428, + 0.022110209, + 8.540206e-34, + 0.035680003, + -0.004454516, + 0.0019904706, + -0.03159778, + 0.039594337, + 0.055580996, + 
0.11990417, + 0.007444201, + 0.0014800398, + -0.035671443, + 0.054802123, + 0.013518193, + 0.015369701, + -0.042170182, + 0.00910241, + -0.03393552, + -0.011560881, + 0.008206326, + 0.03244244, + -0.057579078, + 0.001215648, + -0.037337195, + -0.09628385, + 0.10470648, + 0.073387526, + 0.034718595, + -0.031235449, + -0.008077066, + 0.0532558, + -0.007544639, + -0.06481378, + 0.0078824125, + 0.059332505, + 0.07509864, + -0.023143422, + -0.053352714, + -0.0049984492, + 0.020093009, + 0.005558518, + 0.02055946, + 0.040190052, + -0.058405206, + -0.019410733, + 0.040003065, + 0.043201532, + 0.0153706325, + 0.038072105, + 0.044809878, + 0.03211562, + 0.02581734, + 0.016989984, + -0.031887848, + -0.072636016, + -0.008867823, + 0.043845262, + -0.032801606, + -0.10555597, + -0.008874612, + 0.037949465, + -0.008839974, + 0.0024741436, + -0.005779733, + 0.06775476, + -0.016673656, + 0.020682104, + -0.02387207, + -0.08558911, + 0.008887117, + -0.07502815, + 0.034403294, + -0.04082733, + -0.06821772, + -0.018959502, + 0.03903044, + 0.011770784, + -0.042644627, + 0.021807244, + 0.069912925, + -0.027863, + 0.021612082, + -0.017177302, + 0.013199131, + -0.06342314, + 0.11476938, + 0.055228394, + -0.057914026, + -0.018466951, + 0.029547459, + -0.025892112, + -0.061446555, + -0.051833864, + 0.12864126, + 0.013783986, + 0.10842094, + 0.025589032, + -1.3291747e-08, + 0.04438634, + -0.035043437, + -0.059084963, + 0.007846919, + -0.03533786, + 0.04078865, + -0.0045822817, + -0.044390634, + -0.017847955, + -0.11152658, + 0.019488214, + -0.04202167, + -0.010433255, + -0.09392986, + 0.031165348, + 0.0037942217, + 0.011776091, + -0.11188344, + 0.019489327, + -0.059643954, + 5.5016415e-05, + 0.023693599, + -0.03426268, + -0.067298956, + -0.05988965, + 0.09677909, + -0.026113264, + 0.11115747, + -0.032836337, + -0.002883786, + 0.048552252, + 0.027802175, + -0.06964344, + -0.024443185, + 0.01612565, + -0.020989701, + 0.062907666, + -0.00074260257, + 0.067105986, + -0.040433157, + 0.077970855, 
+ -0.04189095, + -0.1258856, + 0.0058066114, + 0.03658347, + -0.015551063, + 0.021594083, + -0.008647476, + -0.026618915, + -0.04521969, + 0.02759545, + -0.02447648, + -0.016449116, + 0.1025887, + -0.016808366, + -0.04455479, + 0.023937078, + -0.017120138, + -0.07922125, + 0.062927626, + 0.038930148, + -0.018900929, + 0.09125473, + -0.017347038 + ], + "index": 2, + "object": "embedding" + } + ], + "model": "all-minilm:l6-v2", + "object": "list", + "usage": { + "prompt_tokens": 15, + "total_tokens": 15 + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/f23defea82ec.json b/tests/integration/recordings/responses/f23defea82ec.json index 1e964af04..5d37b2524 100644 --- a/tests/integration/recordings/responses/f23defea82ec.json +++ b/tests/integration/recordings/responses/f23defea82ec.json @@ -19,22 +19,390 @@ "data": [ { "embedding": [ - 0.253706, - 0.016367152, - -0.29664654, - 0.31654558, - -0.18624601, - 0.07602756, - -0.031531323, - 0.2986085, - -0.49672848, - -0.36617878, - 0.25328273, - -0.33349335, - 0.0060151755, - 0.14081024, - -0.13757885, - -0.14679416 + 0.04635219, + 0.002988263, + -0.054220885, + 0.057812735, + -0.0340614, + 0.013923248, + -0.005755826, + 0.054555666, + -0.09073176, + -0.066910096, + 0.046287432, + -0.060912322, + 0.0010950539, + 0.025724398, + -0.025169374, + -0.026821515, + -0.030190151, + 0.0019341545, + -0.0754819, + 0.057380512, + 0.020332545, + -0.005591279, + -0.0022273492, + 0.012063173, + -0.011033521, + -0.03300947, + 0.05462081, + 0.014426073, + 0.024025004, + 0.004224287, + 0.09837723, + 0.08385713, + -0.049175426, + 0.03877149, + 0.08748876, + -0.0223024, + 0.006552746, + -0.0070359865, + 0.017893821, + 0.015465863, + 0.05007282, + -0.019349905, + 0.064887345, + 0.03184605, + 0.0034936152, + 0.02317752, + -0.06297051, + 0.044468515, + -0.022246253, + -0.017976552, + 0.040390052, + -0.0020998395, + -0.05173264, + 0.014722753, + 0.01640469, + -0.06438627, + -0.043313596, + 
-0.040564552, + 0.044412937, + -0.0031199565, + -0.007237415, + -0.05158015, + 0.059660934, + -0.014839656, + 0.012902056, + 0.028181136, + -0.019578207, + -0.0664231, + -0.06333673, + 0.028995825, + -0.114707075, + 0.041575413, + -0.022128351, + 0.01979776, + 0.0630018, + 0.011822141, + -0.06492722, + -0.066328146, + 0.021114407, + -0.020638306, + -0.009599678, + 0.013701863, + -0.060742326, + 0.005395315, + 0.026589092, + 0.11719033, + 0.067120634, + 0.008300158, + 0.036319703, + 0.00772981, + 0.071582936, + 0.019818509, + -0.15945566, + 0.047943458, + 0.00031571978, + -0.04666597, + 0.007148715, + -0.08839544, + 0.038042437, + 0.06620088, + 0.034336157, + -0.035366412, + 0.041598067, + 0.073756054, + -0.018818064, + -0.017260034, + 0.058635473, + -0.01371376, + 0.048319146, + -0.023727186, + 0.024134034, + 0.015763162, + 0.06681245, + 0.01748244, + 0.0825409, + -0.044568237, + 0.0015441044, + -0.011225885, + 0.0153481, + -0.061364066, + 0.05792184, + 0.044216745, + -0.047036964, + -0.02634555, + -0.033504363, + 0.06713578, + 0.030866034, + 2.024336e-34, + -0.03532978, + 0.021929236, + 0.030160688, + 0.09271786, + -0.010355268, + 0.07196569, + 0.052604284, + 0.085753724, + 0.094942175, + 0.053786535, + -0.08900509, + -0.024382822, + -0.008744401, + -0.03167582, + 0.01025236, + 0.1818434, + -0.0022662894, + 0.118558116, + -0.072208576, + -0.005867667, + 0.0746222, + -0.024001855, + -0.013938801, + -0.030681474, + -0.029207803, + -0.117624186, + -0.046466038, + -0.002622228, + -0.0902171, + -0.038626853, + -0.037497964, + -0.02418436, + -0.069297835, + 0.06424038, + 0.0045628003, + -0.0041498984, + -0.01649947, + 0.051125433, + -0.0058985935, + -0.0122523345, + -0.047424458, + -0.007806876, + 0.07906618, + 0.03244041, + -0.044682544, + -0.022625683, + 0.028852794, + -0.050480433, + 0.043801326, + -0.023512814, + -0.029832385, + 0.031089257, + 0.07129686, + -0.089649536, + 0.011963804, + -0.018448317, + 0.019637493, + 0.020081993, + 0.0012980831, + 0.093201645, + 
-0.064436235, + -0.040581323, + -0.01193043, + 0.043884862, + -0.010675756, + -0.030739127, + 0.005605308, + -0.110498495, + 0.044510514, + 0.037110664, + 0.04116233, + -0.039460793, + -0.04470639, + -0.027589805, + -0.02073358, + -0.067221105, + 0.050390884, + 0.031397663, + -0.008031462, + -0.009285899, + 0.0013141648, + -0.017254544, + 0.010367782, + -0.05940024, + -0.018042587, + -0.15487815, + 0.0069424273, + -0.05208202, + 0.0014201442, + -0.13956298, + -0.040203292, + 0.027910054, + -0.064872995, + -0.016270144, + 0.07052549, + 5.3188943e-34, + 0.012666737, + 0.016728623, + -0.013163009, + 0.06391275, + -0.043404065, + 0.015435096, + 0.03720438, + 0.05997576, + -0.07789181, + -0.0408386, + 0.024137221, + -0.019834999, + -0.034739267, + 0.00042199617, + 0.048484907, + 0.08716056, + -0.101133205, + -0.07535088, + -0.03912376, + -0.031597532, + -0.052266575, + 0.022085808, + -0.011040282, + 0.005077135, + -0.088432744, + -0.010477913, + 0.047780182, + -0.073345095, + 0.014382301, + 0.038075384, + 0.02176859, + -0.029071847, + -0.036925532, + 0.14317243, + 0.020646103, + -0.08367964, + 0.111576855, + -0.009943396, + 0.023071144, + 0.0926832, + 0.011242715, + 0.068017475, + -0.007714686, + 0.03060742, + -0.011360289, + 0.109015204, + 0.12930514, + -0.07566831, + 0.09001269, + -0.0090979, + 0.0148039665, + 0.048663232, + 0.08894293, + 0.038565516, + 0.005821986, + 0.016084671, + -0.106283545, + -0.033372246, + 0.05440088, + -0.005663873, + 0.0011572369, + -0.024969472, + 0.043092247, + -0.009314855, + -0.11836073, + -0.027310666, + 0.009811885, + -0.0052975323, + -0.044883158, + 0.066436425, + -0.06750139, + -0.02696421, + 0.01402391, + -0.04950559, + -0.084093384, + -0.07380851, + 0.04709705, + 4.9404687e-05, + 0.01672617, + 0.01849747, + 0.027683195, + 0.0047972985, + 0.0017495222, + 0.07066204, + -0.022430636, + 0.06875498, + 0.093927115, + 0.11101308, + -0.015589739, + 0.021178465, + 0.033638563, + 0.034676168, + -0.026882911, + -0.010514364, + 0.0073013064, + 
-1.2070348e-08, + -0.10034882, + -0.028641108, + -0.061462097, + -0.009792086, + -0.081652306, + -0.011814046, + 0.002039501, + 0.010384326, + 0.01639641, + 0.09542911, + 0.012538498, + -0.03542602, + 0.018125113, + 0.062750235, + 0.0007333235, + -0.13612862, + -0.049830034, + 0.021177148, + 0.006589976, + 0.007859552, + -0.03270378, + 0.024738451, + -0.02542262, + -0.0033008803, + 0.030640591, + -0.032442387, + 0.04598555, + 0.03903257, + 0.035755396, + 0.01686084, + 0.13498692, + 0.028296864, + -0.0035224769, + -0.036735818, + -0.046355885, + 0.057701495, + 0.008000554, + 0.047822826, + 0.04911064, + 0.035214324, + -0.09817153, + 0.0050856513, + -0.018094635, + -0.04385158, + 0.06649695, + -0.037648164, + -0.006218895, + -0.037976924, + -0.0036204353, + -0.03149386, + 0.031777944, + -0.011333557, + 0.009081317, + 0.022486951, + 0.032106593, + 0.023041077, + -0.06739943, + 0.06294171, + -0.057333894, + -0.041295, + 0.060841344, + 0.03247397, + -0.05132725, + -0.04992364 ], "index": 0, "object": "embedding" diff --git a/tests/integration/recordings/responses/f28a44c97ea7.json b/tests/integration/recordings/responses/f28a44c97ea7.json index d50851dfd..fd4fb9025 100644 --- a/tests/integration/recordings/responses/f28a44c97ea7.json +++ b/tests/integration/recordings/responses/f28a44c97ea7.json @@ -20,14 +20,14 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-282", + "id": "chatcmpl-685", "choices": [ { "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "The largest planet in our solar system is Jupiter. It is a gas giant, with a diameter of approximately 142,984 kilometers (88,846 miles). This makes it more than 11 times the diameter of the Earth and more than 2.5 times the mass of all the other planets in our solar system combined.", + "content": "The largest planet in our solar system is Jupiter. 
It is a gas giant and has a diameter of approximately 142,984 kilometers (88,846 miles). Jupiter is more than 1,300 times the size of Earth and is the fifth planet from the Sun.", "refusal": null, "role": "assistant", "annotations": null, @@ -37,15 +37,15 @@ } } ], - "created": 1759012143, + "created": 1759437857, "model": "llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 67, + "completion_tokens": 55, "prompt_tokens": 35, - "total_tokens": 102, + "total_tokens": 90, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/f340a394f6e0.json b/tests/integration/recordings/responses/f340a394f6e0.json index 50826e3c1..96ea5dab0 100644 --- a/tests/integration/recordings/responses/f340a394f6e0.json +++ b/tests/integration/recordings/responses/f340a394f6e0.json @@ -21,7 +21,7 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-494", + "id": "chatcmpl-233", "choices": [ { "finish_reason": "stop", @@ -38,7 +38,7 @@ } } ], - "created": 1759245126, + "created": 1759437799, "model": "llama-guard3:1b", "object": "chat.completion", "service_tier": null, diff --git a/tests/integration/recordings/responses/f6a1cb47dfe8.json b/tests/integration/recordings/responses/f6a1cb47dfe8.json new file mode 100644 index 000000000..e5677335b --- /dev/null +++ b/tests/integration/recordings/responses/f6a1cb47dfe8.json @@ -0,0 +1,170 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant." + }, + { + "role": "user", + "content": "Say hi to the world. Use tools to do so." 
+ }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_qvp9u80l", + "type": "function", + "function": { + "name": "greet_everyone", + "arguments": "{\"url\":\"world\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_qvp9u80l", + "content": [ + { + "type": "text", + "text": "Hello, world!" + } + ] + }, + { + "role": "assistant", + "content": "<|python_tag|>{\"message\": \"Hello, world!\", \"type\": \"hello_world\"}" + }, + { + "role": "user", + "content": "What is the boiling point of polyjuice? Use tools to answer." + } + ], + "max_tokens": 0, + "stream": true, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "greet_everyone", + "parameters": { + "properties": { + "url": { + "title": "Url", + "type": "string" + } + }, + "required": [ + "url" + ], + "title": "greet_everyoneArguments", + "type": "object" + } + } + }, + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ", + "parameters": { + "properties": { + "liquid_name": { + "title": "Liquid Name", + "type": "string" + }, + "celsius": { + "default": true, + "title": "Celsius", + "type": "boolean" + } + }, + "required": [ + "liquid_name" + ], + "title": "get_boiling_pointArguments", + "type": "object" + } + } + } + ] + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-827", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": [ + { + "index": 0, + "id": "call_y1jmdav5", + "function": { + 
"arguments": "{\"celsius\":\"false\",\"liquid_name\":\"polyjuice\"}", + "name": "get_boiling_point" + }, + "type": "function" + } + ] + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437847, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-827", + "choices": [ + { + "delta": { + "content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "tool_calls", + "index": 0, + "logprobs": null + } + ], + "created": 1759437848, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/f70f30f54211.json b/tests/integration/recordings/responses/f70f30f54211.json index c4dd90e68..ba0d1d59d 100644 --- a/tests/integration/recordings/responses/f70f30f54211.json +++ b/tests/integration/recordings/responses/f70f30f54211.json @@ -38,42 +38,32 @@ "body": { "__type__": "openai.types.chat.chat_completion.ChatCompletion", "__data__": { - "id": "chatcmpl-10", + "id": "chatcmpl-755", "choices": [ { - "finish_reason": "tool_calls", + "finish_reason": "stop", "index": 0, "logprobs": null, "message": { - "content": "", + "content": "{\"name\":\"get_weather\",\"parameters\":{\\>\"city\": \"Tokyo\"}}", "refusal": null, "role": "assistant", "annotations": null, "audio": null, "function_call": null, - "tool_calls": [ - { - "id": "call_7cm57k1b", - "function": { - "arguments": "{\"city\":\"Tokyo\"}", - "name": "get_weather" - }, - "type": "function", - "index": 0 - } - ] + "tool_calls": null } } ], - "created": 1756921368, + "created": 1759437886, "model": 
"llama3.2:3b-instruct-fp16", "object": "chat.completion", "service_tier": null, "system_fingerprint": "fp_ollama", "usage": { - "completion_tokens": 18, + "completion_tokens": 17, "prompt_tokens": 177, - "total_tokens": 195, + "total_tokens": 194, "completion_tokens_details": null, "prompt_tokens_details": null } diff --git a/tests/integration/recordings/responses/f8ba05a5ce61.json b/tests/integration/recordings/responses/f8ba05a5ce61.json new file mode 100644 index 000000000..a09e430bd --- /dev/null +++ b/tests/integration/recordings/responses/f8ba05a5ce61.json @@ -0,0 +1,402 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point_with_metadata\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point_with_metadata tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.137398Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "[", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.179615Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, 
+ "eval_duration": null, + "response": "get", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.221193Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_bo", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.264409Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "iling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.30586Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.347477Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_with", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.389016Z", + "done": false, + "done_reason": null, + 
"total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_metadata", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.430288Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "(", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.471941Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "liquid", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.513993Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "_name", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.555492Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "='", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": 
"llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.596851Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.638274Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.680806Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.723172Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "',", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.764626Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " cel", + "thinking": 
null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.806696Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ci", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.848776Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "us", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.891751Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "=True", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.933562Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ")]", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:14.975196Z", + "done": true, + "done_reason": "stop", + "total_duration": 1471473500, + "load_duration": 104730458, + 
"prompt_eval_count": 368, + "prompt_eval_duration": 527632084, + "eval_count": 21, + "eval_duration": 838372750, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/fced8b60ae5f.json b/tests/integration/recordings/responses/fced8b60ae5f.json new file mode 100644 index 000000000..9d90b6ca3 --- /dev/null +++ b/tests/integration/recordings/responses/fced8b60ae5f.json @@ -0,0 +1,986 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "messages": [ + { + "role": "system", + "content": "You are a helpful assistant Always respond with tool calls no matter what. " + }, + { + "role": "user", + "content": "Get the boiling point of polyjuice with a tool call." + }, + { + "role": "assistant", + "content": "", + "tool_calls": [ + { + "id": "call_6ufbs6q1", + "type": "function", + "function": { + "name": "get_boiling_point", + "arguments": "{\"celcius\":\"true\",\"liquid_name\":\"polyjuice\"}" + } + } + ] + }, + { + "role": "tool", + "tool_call_id": "call_6ufbs6q1", + "content": "Error when running tool: 'ToolCall' object has no attribute 'arguments_json'" + } + ], + "max_tokens": 512, + "stream": true, + "temperature": 0.0001, + "tool_choice": "auto", + "tools": [ + { + "type": "function", + "function": { + "name": "get_boiling_point", + "description": "Returns the boiling point of a liquid in Celcius or Fahrenheit.", + "parameters": { + "type": "object", + "properties": { + "liquid_name": { + "type": "string", + "description": "The name of the liquid" + }, + "celcius": { + "type": "boolean", + "description": "Whether to return the boiling point in Celcius" + } + }, + "required": [ + "liquid_name" + ] + } + } + } + ], + "top_p": 0.9 + }, + "endpoint": "/v1/chat/completions", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "I", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " apologize", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " for", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " error", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": ".", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " Here", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " is", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 
1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " the", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " revised", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " tool", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " call", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": ":\n\n", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437819, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "{\"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "name", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + 
"content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "get", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "_bo", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": 
"openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "iling", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "_point", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "\",", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + 
"service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "parameters", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " {\"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "liquid", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + 
"created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "_name", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "\":", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": " \"", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "poly", + "function_call": null, + "refusal": null, + "role": "assistant", + 
"tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "ju", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "ice", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + "content": "\"}}", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": null, + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + }, + { + "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", + "__data__": { + "id": "chatcmpl-371", + "choices": [ + { + "delta": { + 
"content": "", + "function_call": null, + "refusal": null, + "role": "assistant", + "tool_calls": null + }, + "finish_reason": "stop", + "index": 0, + "logprobs": null + } + ], + "created": 1759437820, + "model": "llama3.2:3b-instruct-fp16", + "object": "chat.completion.chunk", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/feae037e2abd.json b/tests/integration/recordings/responses/feae037e2abd.json new file mode 100644 index 000000000..732b71b23 --- /dev/null +++ b/tests/integration/recordings/responses/feae037e2abd.json @@ -0,0 +1,258 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "raw": true, + "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\nYou are a helpful assistant. You have access to functions, but you should only use them if they are required.\nYou are an expert in composing functions. You are given a question and a set of possible functions.\nBased on the question, you may or may not need to make one function/tool call to achieve the purpose.\n\nIf you decide to invoke any of the function(s), you MUST put it in the format of [func_name1(params_name1=params_value1, params_name2=params_value2...), func_name2(params)]\nIf you decide to invoke a function, you SHOULD NOT include any other text in the response. 
besides the function call in the above format.\nFor a boolean parameter, be sure to use `True` or `False` (capitalized) for the value.\n\n\nHere is a list of functions in JSON format that you can invoke.\n\n[\n {\n \"name\": \"get_boiling_point\",\n \"description\": \"Returns the boiling point of a liquid in Celcius or Fahrenheit.\",\n \"parameters\": {\n \"type\": \"dict\",\n \"required\": [\"liquid_name\"],\n \"properties\": {\n \"liquid_name\": {\n \"type\": \"str\",\n \"description\": \"The name of the liquid\"\n },\n \"celcius\": {\n \"type\": \"bool\",\n \"description\": \"Whether to return the boiling point in Celcius\"\n }\n }\n }\n }\n]\n\nYou can answer general questions or invoke tools when necessary.\nIn addition to tool calls, you should also augment your responses by using the tool outputs.\nYou are a helpful assistant<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nCall get_boiling_point tool and answer What is the boiling point of polyjuice?<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n[get_boiling_point(liquid_name=\"polyjuice\", celcius=True)]<|eot_id|><|start_header_id|>ipython<|end_header_id|>\n\n-100<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.185676Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "The", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.227434Z", + "done": false, + 
"done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " boiling", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.268751Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " point", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.310105Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " of", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.351683Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " poly", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.396988Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ju", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + 
"__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.439384Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "ice", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.481075Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " is", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.522627Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": " -", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.564154Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "100", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.605696Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + 
"response": "\u00b0C", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.647134Z", + "done": false, + "done_reason": null, + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": ".", + "thinking": null, + "context": null + } + }, + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-02T02:55:12.688465Z", + "done": true, + "done_reason": "stop", + "total_duration": 646686792, + "load_duration": 78333875, + "prompt_eval_count": 395, + "prompt_eval_duration": 64602125, + "eval_count": 13, + "eval_duration": 503233541, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git a/tests/integration/recordings/responses/models-bd032f995f2a-16718308.json b/tests/integration/recordings/responses/models-bd032f995f2a-16718308.json new file mode 100644 index 000000000..cf7ed5924 --- /dev/null +++ b/tests/integration/recordings/responses/models-bd032f995f2a-16718308.json @@ -0,0 +1,843 @@ +{ + "request": { + "method": "POST", + "url": "https://api.openai.com/v1/v1/models", + "headers": {}, + "body": {}, + "endpoint": "/v1/models", + "model": "" + }, + "response": { + "body": [ + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4-0613", + "created": 1686588896, + "object": "model", + "owned_by": "openai" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4", + "created": 1687882411, + "object": "model", + "owned_by": "openai" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-3.5-turbo", + "created": 1677610602, + "object": "model", + "owned_by": "openai" + } + }, + { + "__type__": 
"openai.types.model.Model", + "__data__": { + "id": "text-embedding-3-small-okan-test", + "created": 1759393278, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-realtime", + "created": 1756271701, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-realtime-2025-08-28", + "created": 1756271773, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-audio", + "created": 1756339249, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-5-codex", + "created": 1757527818, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "davinci-002", + "created": 1692634301, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "babbage-002", + "created": 1692634615, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-3.5-turbo-instruct", + "created": 1692901427, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-3.5-turbo-instruct-0914", + "created": 1694122472, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "dall-e-3", + "created": 1698785189, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "dall-e-2", + "created": 1698798177, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4-1106-preview", + "created": 1698957206, + "object": "model", + "owned_by": "system" + } + }, + 
{ + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-3.5-turbo-1106", + "created": 1698959748, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "tts-1-hd", + "created": 1699046015, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "tts-1-1106", + "created": 1699053241, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "tts-1-hd-1106", + "created": 1699053533, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "text-embedding-3-small", + "created": 1705948997, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "text-embedding-3-large", + "created": 1705953180, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4-0125-preview", + "created": 1706037612, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4-turbo-preview", + "created": 1706037777, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-3.5-turbo-0125", + "created": 1706048358, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4-turbo", + "created": 1712361441, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4-turbo-2024-04-09", + "created": 1712601677, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o", + "created": 1715367049, + "object": "model", + "owned_by": "system" 
+ } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-2024-05-13", + "created": 1715368132, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-2024-07-18", + "created": 1721172717, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini", + "created": 1721172741, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-2024-08-06", + "created": 1722814719, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "chatgpt-4o-latest", + "created": 1723515131, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o1-mini-2024-09-12", + "created": 1725648979, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o1-mini", + "created": 1725649008, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-realtime-preview-2024-10-01", + "created": 1727131766, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-audio-preview-2024-10-01", + "created": 1727389042, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-audio-preview", + "created": 1727460443, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-realtime-preview", + "created": 1727659998, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "omni-moderation-latest", + 
"created": 1731689265, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "omni-moderation-2024-09-26", + "created": 1732734466, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-realtime-preview-2024-12-17", + "created": 1733945430, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-audio-preview-2024-12-17", + "created": 1734034239, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-realtime-preview-2024-12-17", + "created": 1734112601, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-audio-preview-2024-12-17", + "created": 1734115920, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o1-2024-12-17", + "created": 1734326976, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o1", + "created": 1734375816, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-realtime-preview", + "created": 1734387380, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-audio-preview", + "created": 1734387424, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3-mini", + "created": 1737146383, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3-mini-2025-01-31", + "created": 1738010200, + "object": "model", + "owned_by": "system" + 
} + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-2024-11-20", + "created": 1739331543, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-search-preview-2025-03-11", + "created": 1741388170, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-search-preview", + "created": 1741388720, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-search-preview-2025-03-11", + "created": 1741390858, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-search-preview", + "created": 1741391161, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-transcribe", + "created": 1742068463, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-transcribe", + "created": 1742068596, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o1-pro-2025-03-19", + "created": 1742251504, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o1-pro", + "created": 1742251791, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-mini-tts", + "created": 1742403959, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3-2025-04-16", + "created": 1744133301, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o4-mini-2025-04-16", + 
"created": 1744133506, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3", + "created": 1744225308, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o4-mini", + "created": 1744225351, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4.1-2025-04-14", + "created": 1744315746, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4.1", + "created": 1744316542, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4.1-mini-2025-04-14", + "created": 1744317547, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4.1-mini", + "created": 1744318173, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4.1-nano-2025-04-14", + "created": 1744321025, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4.1-nano", + "created": 1744321707, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-image-1", + "created": 1745517030, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "codex-mini-latest", + "created": 1746673257, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3-pro", + "created": 1748475349, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-realtime-preview-2025-06-03", + 
"created": 1748907838, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-4o-audio-preview-2025-06-03", + "created": 1748908498, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3-pro-2025-06-10", + "created": 1749166761, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o4-mini-deep-research", + "created": 1749685485, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3-deep-research", + "created": 1749840121, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o3-deep-research-2025-06-26", + "created": 1750865219, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "o4-mini-deep-research-2025-06-26", + "created": 1750866121, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-5-chat-latest", + "created": 1754073306, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-5-2025-08-07", + "created": 1754075360, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-5", + "created": 1754425777, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-5-mini-2025-08-07", + "created": 1754425867, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-5-mini", + "created": 1754425928, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": 
"openai.types.model.Model", + "__data__": { + "id": "gpt-5-nano-2025-08-07", + "created": 1754426303, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-5-nano", + "created": 1754426384, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-audio-2025-08-28", + "created": 1756256146, + "object": "model", + "owned_by": "system" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "gpt-3.5-turbo-16k", + "created": 1683758102, + "object": "model", + "owned_by": "openai-internal" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "tts-1", + "created": 1681940951, + "object": "model", + "owned_by": "openai-internal" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "whisper-1", + "created": 1677532384, + "object": "model", + "owned_by": "openai-internal" + } + }, + { + "__type__": "openai.types.model.Model", + "__data__": { + "id": "text-embedding-ada-002", + "created": 1671217299, + "object": "model", + "owned_by": "openai-internal" + } + } + ], + "is_streaming": false + } +} diff --git a/tests/integration/tool_runtime/test_builtin_tools.py b/tests/integration/tool_runtime/test_builtin_tools.py index 1acf06719..97300a8dd 100644 --- a/tests/integration/tool_runtime/test_builtin_tools.py +++ b/tests/integration/tool_runtime/test_builtin_tools.py @@ -26,7 +26,7 @@ def test_web_search_tool(llama_stack_client, sample_search_query): pytest.skip("TAVILY_SEARCH_API_KEY not set, skipping test") tools = llama_stack_client.tool_runtime.list_tools() - assert any(tool.identifier == "web_search" for tool in tools) + assert any(tool.name == "web_search" for tool in tools) response = llama_stack_client.tool_runtime.invoke_tool( tool_name="web_search", kwargs={"query": sample_search_query} @@ -52,7 +52,7 @@ def test_wolfram_alpha_tool(llama_stack_client, 
sample_wolfram_alpha_query): pytest.skip("WOLFRAM_ALPHA_API_KEY not set, skipping test") tools = llama_stack_client.tool_runtime.list_tools() - assert any(tool.identifier == "wolfram_alpha" for tool in tools) + assert any(tool.name == "wolfram_alpha" for tool in tools) response = llama_stack_client.tool_runtime.invoke_tool( tool_name="wolfram_alpha", kwargs={"query": sample_wolfram_alpha_query} ) diff --git a/tests/integration/tool_runtime/test_mcp.py b/tests/integration/tool_runtime/test_mcp.py index 831186b15..9e22d3e58 100644 --- a/tests/integration/tool_runtime/test_mcp.py +++ b/tests/integration/tool_runtime/test_mcp.py @@ -54,14 +54,14 @@ def test_mcp_invocation(llama_stack_client, text_model_id, mcp_server): } with pytest.raises(Exception, match="Unauthorized"): - llama_stack_client.tools.list() + llama_stack_client.tools.list(toolgroup_id=test_toolgroup_id) response = llama_stack_client.tools.list( toolgroup_id=test_toolgroup_id, extra_headers=auth_headers, ) assert len(response) == 2 - assert {t.identifier for t in response} == {"greet_everyone", "get_boiling_point"} + assert {t.name for t in response} == {"greet_everyone", "get_boiling_point"} response = llama_stack_client.tool_runtime.invoke_tool( tool_name="greet_everyone", diff --git a/tests/integration/tool_runtime/test_mcp_json_schema.py b/tests/integration/tool_runtime/test_mcp_json_schema.py new file mode 100644 index 000000000..47e9ee029 --- /dev/null +++ b/tests/integration/tool_runtime/test_mcp_json_schema.py @@ -0,0 +1,404 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +""" +Integration tests for MCP tools with complex JSON Schema support. +Tests $ref, $defs, and other JSON Schema features through MCP integration. 
+""" + +import json + +import pytest + +from llama_stack import LlamaStackAsLibraryClient +from tests.common.mcp import make_mcp_server + +AUTH_TOKEN = "test-token" + + +@pytest.fixture(scope="function") +def mcp_server_with_complex_schemas(): + """MCP server with tools that have complex schemas including $ref and $defs.""" + from mcp.server.fastmcp import Context + + async def book_flight(flight: dict, passengers: list[dict], payment: dict, ctx: Context) -> dict: + """ + Book a flight with passenger and payment information. + + This tool uses JSON Schema $ref and $defs for type reuse. + """ + return { + "booking_id": "BK12345", + "flight": flight, + "passengers": passengers, + "payment": payment, + "status": "confirmed", + } + + async def process_order(order_data: dict, ctx: Context) -> dict: + """ + Process an order with nested address information. + + Uses nested objects and $ref. + """ + return {"order_id": "ORD789", "status": "processing", "data": order_data} + + async def flexible_contact(contact_info: str, ctx: Context) -> dict: + """ + Accept flexible contact (email or phone). + + Uses anyOf schema. 
+ """ + if "@" in contact_info: + return {"type": "email", "value": contact_info} + else: + return {"type": "phone", "value": contact_info} + + # Manually attach complex schemas to the functions + # (FastMCP might not support this by default, so this is test setup) + + # For MCP, we need to set the schema via tool annotations + # This is test infrastructure to force specific schemas + + tools = {"book_flight": book_flight, "process_order": process_order, "flexible_contact": flexible_contact} + + # Note: In real MCP implementation, we'd configure these schemas properly + # For testing, we may need to mock or extend the MCP server setup + + with make_mcp_server(required_auth_token=AUTH_TOKEN, tools=tools) as server_info: + yield server_info + + +@pytest.fixture(scope="function") +def mcp_server_with_output_schemas(): + """MCP server with tools that have output schemas defined.""" + from mcp.server.fastmcp import Context + + async def get_weather(location: str, ctx: Context) -> dict: + """ + Get weather with structured output. + + Has both input and output schemas. + """ + return {"temperature": 72.5, "conditions": "Sunny", "humidity": 45, "wind_speed": 10.2} + + async def calculate(x: float, y: float, operation: str, ctx: Context) -> dict: + """ + Perform calculation with validated output. 
+ """ + operations = {"add": x + y, "subtract": x - y, "multiply": x * y, "divide": x / y if y != 0 else None} + result = operations.get(operation) + return {"result": result, "operation": operation} + + tools = {"get_weather": get_weather, "calculate": calculate} + + with make_mcp_server(required_auth_token=AUTH_TOKEN, tools=tools) as server_info: + yield server_info + + +class TestMCPSchemaPreservation: + """Test that MCP tool schemas are preserved correctly.""" + + def test_mcp_tools_list_with_schemas(self, llama_stack_client, mcp_server_with_complex_schemas): + """Test listing MCP tools preserves input_schema.""" + if not isinstance(llama_stack_client, LlamaStackAsLibraryClient): + pytest.skip("Library client required for local MCP server") + + test_toolgroup_id = "mcp::complex_list" + uri = mcp_server_with_complex_schemas["server_url"] + + # Clean up any existing registration + try: + llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id) + except Exception: + pass + + # Register MCP toolgroup + llama_stack_client.toolgroups.register( + toolgroup_id=test_toolgroup_id, + provider_id="model-context-protocol", + mcp_endpoint=dict(uri=uri), + ) + + provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}} + auth_headers = { + "X-LlamaStack-Provider-Data": json.dumps(provider_data), + } + + # List runtime tools + response = llama_stack_client.tool_runtime.list_tools( + tool_group_id=test_toolgroup_id, + extra_headers=auth_headers, + ) + + tools = response + assert len(tools) > 0 + + # Check each tool has input_schema + for tool in tools: + assert hasattr(tool, "input_schema") + # Schema might be None or a dict depending on tool + if tool.input_schema is not None: + assert isinstance(tool.input_schema, dict) + # Should have basic JSON Schema structure + if "properties" in tool.input_schema: + assert "type" in tool.input_schema + + def test_mcp_schema_with_refs_preserved(self, llama_stack_client, 
mcp_server_with_complex_schemas): + """Test that $ref and $defs in MCP schemas are preserved.""" + if not isinstance(llama_stack_client, LlamaStackAsLibraryClient): + pytest.skip("Library client required for local MCP server") + + test_toolgroup_id = "mcp::complex_refs" + uri = mcp_server_with_complex_schemas["server_url"] + + # Register + try: + llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id) + except Exception: + pass + + llama_stack_client.toolgroups.register( + toolgroup_id=test_toolgroup_id, + provider_id="model-context-protocol", + mcp_endpoint=dict(uri=uri), + ) + provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}} + auth_headers = { + "X-LlamaStack-Provider-Data": json.dumps(provider_data), + } + + # List tools + response = llama_stack_client.tool_runtime.list_tools( + tool_group_id=test_toolgroup_id, + extra_headers=auth_headers, + ) + + # Find book_flight tool (which should have $ref/$defs) + book_flight_tool = next((t for t in response if t.name == "book_flight"), None) + + if book_flight_tool and book_flight_tool.input_schema: + # If the MCP server provides $defs, they should be preserved + # This is the KEY test for the bug fix + schema = book_flight_tool.input_schema + + # Check if schema has properties (might vary based on MCP implementation) + if "properties" in schema: + # Verify schema structure is preserved (exact structure depends on MCP server) + assert isinstance(schema["properties"], dict) + + # If $defs are present, verify they're preserved + if "$defs" in schema: + assert isinstance(schema["$defs"], dict) + # Each definition should be a dict + for _def_name, def_schema in schema["$defs"].items(): + assert isinstance(def_schema, dict) + + def test_mcp_output_schema_preserved(self, llama_stack_client, mcp_server_with_output_schemas): + """Test that MCP outputSchema is preserved.""" + if not isinstance(llama_stack_client, LlamaStackAsLibraryClient): + pytest.skip("Library client required 
for local MCP server") + + test_toolgroup_id = "mcp::with_output" + uri = mcp_server_with_output_schemas["server_url"] + + try: + llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id) + except Exception: + pass + + llama_stack_client.toolgroups.register( + toolgroup_id=test_toolgroup_id, + provider_id="model-context-protocol", + mcp_endpoint=dict(uri=uri), + ) + + provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}} + auth_headers = { + "X-LlamaStack-Provider-Data": json.dumps(provider_data), + } + + response = llama_stack_client.tool_runtime.list_tools( + tool_group_id=test_toolgroup_id, + extra_headers=auth_headers, + ) + + # Find get_weather tool + weather_tool = next((t for t in response if t.name == "get_weather"), None) + + if weather_tool: + # Check if output_schema field exists and is preserved + assert hasattr(weather_tool, "output_schema") + + # If MCP server provides output schema, it should be preserved + if weather_tool.output_schema is not None: + assert isinstance(weather_tool.output_schema, dict) + # Should have JSON Schema structure + if "properties" in weather_tool.output_schema: + assert "type" in weather_tool.output_schema + + +class TestMCPToolInvocation: + """Test invoking MCP tools with complex schemas.""" + + def test_invoke_mcp_tool_with_nested_data(self, llama_stack_client, mcp_server_with_complex_schemas): + """Test invoking MCP tool that expects nested object structure.""" + if not isinstance(llama_stack_client, LlamaStackAsLibraryClient): + pytest.skip("Library client required for local MCP server") + + test_toolgroup_id = "mcp::complex_invoke_nested" + uri = mcp_server_with_complex_schemas["server_url"] + + try: + llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id) + except Exception: + pass + + llama_stack_client.toolgroups.register( + toolgroup_id=test_toolgroup_id, + provider_id="model-context-protocol", + mcp_endpoint=dict(uri=uri), + ) + + provider_data = 
{"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}} + auth_headers = { + "X-LlamaStack-Provider-Data": json.dumps(provider_data), + } + + # List tools to populate the tool index + llama_stack_client.tool_runtime.list_tools( + tool_group_id=test_toolgroup_id, + extra_headers=auth_headers, + ) + + # Invoke tool with complex nested data + result = llama_stack_client.tool_runtime.invoke_tool( + tool_name="process_order", + kwargs={ + "order_data": { + "items": [{"name": "Widget", "quantity": 2}, {"name": "Gadget", "quantity": 1}], + "shipping": {"address": {"street": "123 Main St", "city": "San Francisco", "zipcode": "94102"}}, + } + }, + extra_headers=auth_headers, + ) + + # Should succeed without schema validation errors + assert result.content is not None + assert result.error_message is None + + def test_invoke_with_flexible_schema(self, llama_stack_client, mcp_server_with_complex_schemas): + """Test invoking tool with anyOf schema (flexible input).""" + if not isinstance(llama_stack_client, LlamaStackAsLibraryClient): + pytest.skip("Library client required for local MCP server") + + test_toolgroup_id = "mcp::complex_invoke_flexible" + uri = mcp_server_with_complex_schemas["server_url"] + + try: + llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id) + except Exception: + pass + + llama_stack_client.toolgroups.register( + toolgroup_id=test_toolgroup_id, + provider_id="model-context-protocol", + mcp_endpoint=dict(uri=uri), + ) + + provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}} + auth_headers = { + "X-LlamaStack-Provider-Data": json.dumps(provider_data), + } + + # List tools to populate the tool index + llama_stack_client.tool_runtime.list_tools( + tool_group_id=test_toolgroup_id, + extra_headers=auth_headers, + ) + + # Test with email format + result_email = llama_stack_client.tool_runtime.invoke_tool( + tool_name="flexible_contact", + kwargs={"contact_info": "user@example.com"}, + 
extra_headers=auth_headers, + ) + + assert result_email.error_message is None + + # Test with phone format + result_phone = llama_stack_client.tool_runtime.invoke_tool( + tool_name="flexible_contact", + kwargs={"contact_info": "+15551234567"}, + extra_headers=auth_headers, + ) + + assert result_phone.error_message is None + + +class TestAgentWithMCPTools: + """Test agents using MCP tools with complex schemas.""" + + @pytest.mark.skip(reason="we need tool call recording for this test since session_id is injected") + def test_agent_with_complex_mcp_tool(self, llama_stack_client, text_model_id, mcp_server_with_complex_schemas): + """Test agent can use MCP tools with $ref/$defs schemas.""" + if not isinstance(llama_stack_client, LlamaStackAsLibraryClient): + pytest.skip("Library client required for local MCP server") + + from llama_stack_client import Agent + + test_toolgroup_id = "mcp::complex_agent" + uri = mcp_server_with_complex_schemas["server_url"] + + try: + llama_stack_client.toolgroups.unregister(toolgroup_id=test_toolgroup_id) + except Exception: + pass + + llama_stack_client.toolgroups.register( + toolgroup_id=test_toolgroup_id, + provider_id="model-context-protocol", + mcp_endpoint=dict(uri=uri), + ) + + provider_data = {"mcp_headers": {uri: {"Authorization": f"Bearer {AUTH_TOKEN}"}}} + auth_headers = { + "X-LlamaStack-Provider-Data": json.dumps(provider_data), + } + + # Create agent with MCP tools + agent = Agent( + client=llama_stack_client, + model=text_model_id, + instructions="You are a helpful assistant that can process orders and book flights.", + tools=[test_toolgroup_id], + extra_headers=auth_headers, + ) + + session_id = agent.create_session("test-session-complex") + + # Ask agent to use a tool with complex schema + response = agent.create_turn( + session_id=session_id, + messages=[ + {"role": "user", "content": "Process an order with 2 widgets going to 123 Main St, San Francisco"} + ], + stream=False, + extra_headers=auth_headers, + ) + + steps = 
response.steps + + # Verify agent was able to call the tool + # (The LLM should have been able to understand the schema and formulate a valid call) + tool_execution_steps = [s for s in steps if s.step_type == "tool_execution"] + + # Agent might or might not call the tool depending on the model + # But if it does, there should be no errors + for step in tool_execution_steps: + if step.tool_responses: + for tool_response in step.tool_responses: + assert tool_response.content is not None diff --git a/tests/unit/distribution/routers/test_routing_tables.py b/tests/unit/distribution/routers/test_routing_tables.py index 456a5d041..54a9dd72e 100644 --- a/tests/unit/distribution/routers/test_routing_tables.py +++ b/tests/unit/distribution/routers/test_routing_tables.py @@ -16,7 +16,7 @@ from llama_stack.apis.datasets.datasets import Dataset, DatasetPurpose, URIDataS from llama_stack.apis.datatypes import Api from llama_stack.apis.models import Model, ModelType from llama_stack.apis.shields.shields import Shield -from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroup, ToolParameter +from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroup from llama_stack.apis.vector_dbs import VectorDB from llama_stack.core.datatypes import RegistryEntrySource from llama_stack.core.routing_tables.benchmarks import BenchmarksRoutingTable @@ -137,7 +137,10 @@ class ToolGroupsImpl(Impl): ToolDef( name="test-tool", description="Test tool", - parameters=[ToolParameter(name="test-param", description="Test param", parameter_type="string")], + input_schema={ + "type": "object", + "properties": {"test-param": {"type": "string", "description": "Test param"}}, + }, ) ] ) diff --git a/tests/unit/models/test_prompt_adapter.py b/tests/unit/models/test_prompt_adapter.py index 0362eb5dd..d31426135 100644 --- a/tests/unit/models/test_prompt_adapter.py +++ b/tests/unit/models/test_prompt_adapter.py @@ -18,7 +18,6 @@ from llama_stack.apis.inference import ( from 
llama_stack.models.llama.datatypes import ( BuiltinTool, ToolDefinition, - ToolParamDefinition, ToolPromptFormat, ) from llama_stack.providers.utils.inference.prompt_adapter import ( @@ -75,12 +74,15 @@ async def test_system_custom_only(): ToolDefinition( tool_name="custom1", description="custom1 tool", - parameters={ - "param1": ToolParamDefinition( - param_type="str", - description="param1 description", - required=True, - ), + input_schema={ + "type": "object", + "properties": { + "param1": { + "type": "str", + "description": "param1 description", + }, + }, + "required": ["param1"], }, ) ], @@ -107,12 +109,15 @@ async def test_system_custom_and_builtin(): ToolDefinition( tool_name="custom1", description="custom1 tool", - parameters={ - "param1": ToolParamDefinition( - param_type="str", - description="param1 description", - required=True, - ), + input_schema={ + "type": "object", + "properties": { + "param1": { + "type": "str", + "description": "param1 description", + }, + }, + "required": ["param1"], }, ), ], @@ -138,7 +143,7 @@ async def test_completion_message_encoding(): tool_calls=[ ToolCall( tool_name="custom1", - arguments={"param1": "value1"}, + arguments='{"param1": "value1"}', # arguments must be a JSON string call_id="123", ) ], @@ -148,12 +153,15 @@ async def test_completion_message_encoding(): ToolDefinition( tool_name="custom1", description="custom1 tool", - parameters={ - "param1": ToolParamDefinition( - param_type="str", - description="param1 description", - required=True, - ), + input_schema={ + "type": "object", + "properties": { + "param1": { + "type": "str", + "description": "param1 description", + }, + }, + "required": ["param1"], }, ), ], @@ -227,12 +235,15 @@ async def test_replace_system_message_behavior_custom_tools(): ToolDefinition( tool_name="custom1", description="custom1 tool", - parameters={ - "param1": ToolParamDefinition( - param_type="str", - description="param1 description", - required=True, - ), + input_schema={ + "type": 
"object", + "properties": { + "param1": { + "type": "str", + "description": "param1 description", + }, + }, + "required": ["param1"], }, ), ], @@ -264,12 +275,15 @@ async def test_replace_system_message_behavior_custom_tools_with_template(): ToolDefinition( tool_name="custom1", description="custom1 tool", - parameters={ - "param1": ToolParamDefinition( - param_type="str", - description="param1 description", - required=True, - ), + input_schema={ + "type": "object", + "properties": { + "param1": { + "type": "str", + "description": "param1 description", + }, + }, + "required": ["param1"], }, ), ], diff --git a/tests/unit/providers/agent/test_meta_reference_agent.py b/tests/unit/providers/agent/test_meta_reference_agent.py index 07e5aa79d..fdbb2b8e9 100644 --- a/tests/unit/providers/agent/test_meta_reference_agent.py +++ b/tests/unit/providers/agent/test_meta_reference_agent.py @@ -16,9 +16,8 @@ from llama_stack.apis.agents import ( ) from llama_stack.apis.common.responses import PaginatedResponse from llama_stack.apis.inference import Inference -from llama_stack.apis.resource import ResourceType from llama_stack.apis.safety import Safety -from llama_stack.apis.tools import ListToolsResponse, Tool, ToolGroups, ToolParameter, ToolRuntime +from llama_stack.apis.tools import ListToolDefsResponse, ToolDef, ToolGroups, ToolRuntime from llama_stack.apis.vector_io import VectorIO from llama_stack.providers.inline.agents.meta_reference.agent_instance import ChatAgent from llama_stack.providers.inline.agents.meta_reference.agents import MetaReferenceAgentsImpl @@ -232,32 +231,26 @@ async def test_delete_agent(agents_impl, sample_agent_config): async def test__initialize_tools(agents_impl, sample_agent_config): # Mock tool_groups_api.list_tools() - agents_impl.tool_groups_api.list_tools.return_value = ListToolsResponse( + agents_impl.tool_groups_api.list_tools.return_value = ListToolDefsResponse( data=[ - Tool( - identifier="story_maker", - provider_id="model-context-protocol", 
- type=ResourceType.tool, + ToolDef( + name="story_maker", toolgroup_id="mcp::my_mcp_server", description="Make a story", - parameters=[ - ToolParameter( - name="story_title", - parameter_type="string", - description="Title of the story", - required=True, - title="Story Title", - ), - ToolParameter( - name="input_words", - parameter_type="array", - description="Input words", - required=False, - items={"type": "string"}, - title="Input Words", - default=[], - ), - ], + input_schema={ + "type": "object", + "properties": { + "story_title": {"type": "string", "description": "Title of the story", "title": "Story Title"}, + "input_words": { + "type": "array", + "description": "Input words", + "items": {"type": "string"}, + "title": "Input Words", + "default": [], + }, + }, + "required": ["story_title"], + }, ) ] ) @@ -284,27 +277,27 @@ async def test__initialize_tools(agents_impl, sample_agent_config): assert second_tool.tool_name == "story_maker" assert second_tool.description == "Make a story" - parameters = second_tool.parameters - assert len(parameters) == 2 + # Verify the input schema + input_schema = second_tool.input_schema + assert input_schema is not None + assert input_schema["type"] == "object" + + properties = input_schema["properties"] + assert len(properties) == 2 # Verify a string property - story_title = parameters.get("story_title") - assert story_title is not None - assert story_title.param_type == "string" - assert story_title.description == "Title of the story" - assert story_title.required - assert story_title.items is None - assert story_title.title == "Story Title" - assert story_title.default is None + story_title = properties["story_title"] + assert story_title["type"] == "string" + assert story_title["description"] == "Title of the story" + assert story_title["title"] == "Story Title" # Verify an array property - input_words = parameters.get("input_words") - assert input_words is not None - assert input_words.param_type == "array" - assert 
input_words.description == "Input words" - assert not input_words.required - assert input_words.items is not None - assert len(input_words.items) == 1 - assert input_words.items.get("type") == "string" - assert input_words.title == "Input Words" - assert input_words.default == [] + input_words = properties["input_words"] + assert input_words["type"] == "array" + assert input_words["description"] == "Input words" + assert input_words["items"]["type"] == "string" + assert input_words["title"] == "Input Words" + assert input_words["default"] == [] + + # Verify required fields + assert input_schema["required"] == ["story_title"] diff --git a/tests/unit/providers/agents/meta_reference/test_openai_responses.py b/tests/unit/providers/agents/meta_reference/test_openai_responses.py index 5ddc1bda8..0b2e6ab82 100644 --- a/tests/unit/providers/agents/meta_reference/test_openai_responses.py +++ b/tests/unit/providers/agents/meta_reference/test_openai_responses.py @@ -39,7 +39,7 @@ from llama_stack.apis.inference import ( OpenAIResponseFormatJSONSchema, OpenAIUserMessageParam, ) -from llama_stack.apis.tools.tools import Tool, ToolGroups, ToolInvocationResult, ToolParameter, ToolRuntime +from llama_stack.apis.tools.tools import ToolDef, ToolGroups, ToolInvocationResult, ToolRuntime from llama_stack.core.access_control.access_control import default_policy from llama_stack.core.datatypes import ResponsesStoreConfig from llama_stack.providers.inline.agents.meta_reference.responses.openai_responses import ( @@ -186,14 +186,15 @@ async def test_create_openai_response_with_string_input_with_tools(openai_respon input_text = "What is the capital of Ireland?" 
model = "meta-llama/Llama-3.1-8B-Instruct" - openai_responses_impl.tool_groups_api.get_tool.return_value = Tool( - identifier="web_search", - provider_id="client", + openai_responses_impl.tool_groups_api.get_tool.return_value = ToolDef( + name="web_search", toolgroup_id="web_search", description="Search the web for information", - parameters=[ - ToolParameter(name="query", parameter_type="string", description="The query to search for", required=True) - ], + input_schema={ + "type": "object", + "properties": {"query": {"type": "string", "description": "The query to search for"}}, + "required": ["query"], + }, ) openai_responses_impl.tool_runtime_api.invoke_tool.return_value = ToolInvocationResult( diff --git a/tests/unit/providers/inference/test_remote_vllm.py b/tests/unit/providers/inference/test_remote_vllm.py index 4dc2e0c16..bb560d378 100644 --- a/tests/unit/providers/inference/test_remote_vllm.py +++ b/tests/unit/providers/inference/test_remote_vllm.py @@ -138,8 +138,7 @@ async def test_tool_call_response(vllm_inference_adapter): ToolCall( call_id="foo", tool_name="knowledge_search", - arguments={"query": "How many?"}, - arguments_json='{"query": "How many?"}', + arguments='{"query": "How many?"}', ) ], ), @@ -263,7 +262,7 @@ async def test_tool_call_delta_streaming_arguments_dict(): assert chunks[1].event.event_type.value == "progress" assert chunks[1].event.delta.type == "tool_call" assert chunks[1].event.delta.parse_status.value == "succeeded" - assert chunks[1].event.delta.tool_call.arguments_json == '{"number": 28, "power": 3}' + assert chunks[1].event.delta.tool_call.arguments == '{"number": 28, "power": 3}' assert chunks[2].event.event_type.value == "complete" @@ -339,11 +338,11 @@ async def test_multiple_tool_calls(): assert chunks[1].event.event_type.value == "progress" assert chunks[1].event.delta.type == "tool_call" assert chunks[1].event.delta.parse_status.value == "succeeded" - assert chunks[1].event.delta.tool_call.arguments_json == '{"number": 
28, "power": 3}' + assert chunks[1].event.delta.tool_call.arguments == '{"number": 28, "power": 3}' assert chunks[2].event.event_type.value == "progress" assert chunks[2].event.delta.type == "tool_call" assert chunks[2].event.delta.parse_status.value == "succeeded" - assert chunks[2].event.delta.tool_call.arguments_json == '{"first_number": 4, "second_number": 7}' + assert chunks[2].event.delta.tool_call.arguments == '{"first_number": 4, "second_number": 7}' assert chunks[3].event.event_type.value == "complete" @@ -456,7 +455,7 @@ async def test_process_vllm_chat_completion_stream_response_tool_call_args_last_ assert chunks[-1].event.event_type == ChatCompletionResponseEventType.complete assert chunks[-2].event.delta.type == "tool_call" assert chunks[-2].event.delta.tool_call.tool_name == mock_tool_name - assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments + assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments_str async def test_process_vllm_chat_completion_stream_response_no_finish_reason(): @@ -468,7 +467,7 @@ async def test_process_vllm_chat_completion_stream_response_no_finish_reason(): mock_tool_name = "mock_tool" mock_tool_arguments = {"arg1": 0, "arg2": 100} - mock_tool_arguments_str = '"{\\"arg1\\": 0, \\"arg2\\": 100}"' + mock_tool_arguments_str = json.dumps(mock_tool_arguments) async def mock_stream(): mock_chunks = [ @@ -508,7 +507,7 @@ async def test_process_vllm_chat_completion_stream_response_no_finish_reason(): assert chunks[-1].event.event_type == ChatCompletionResponseEventType.complete assert chunks[-2].event.delta.type == "tool_call" assert chunks[-2].event.delta.tool_call.tool_name == mock_tool_name - assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments + assert chunks[-2].event.delta.tool_call.arguments == mock_tool_arguments_str async def test_process_vllm_chat_completion_stream_response_tool_without_args(): @@ -556,7 +555,7 @@ async def 
test_process_vllm_chat_completion_stream_response_tool_without_args(): assert chunks[-1].event.event_type == ChatCompletionResponseEventType.complete assert chunks[-2].event.delta.type == "tool_call" assert chunks[-2].event.delta.tool_call.tool_name == mock_tool_name - assert chunks[-2].event.delta.tool_call.arguments == {} + assert chunks[-2].event.delta.tool_call.arguments == "{}" async def test_health_status_success(vllm_inference_adapter): diff --git a/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py b/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py index 6fda2b508..4b706717d 100644 --- a/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py +++ b/tests/unit/providers/inline/agents/meta_reference/responses/test_streaming.py @@ -4,7 +4,7 @@ # This source code is licensed under the terms described in the LICENSE file in # the root directory of this source tree. -from llama_stack.apis.tools import ToolDef, ToolParameter +from llama_stack.apis.tools import ToolDef from llama_stack.providers.inline.agents.meta_reference.responses.streaming import ( convert_tooldef_to_chat_tool, ) @@ -20,15 +20,11 @@ def test_convert_tooldef_to_chat_tool_preserves_items_field(): tool_def = ToolDef( name="test_tool", description="A test tool with array parameter", - parameters=[ - ToolParameter( - name="tags", - parameter_type="array", - description="List of tags", - required=True, - items={"type": "string"}, - ) - ], + input_schema={ + "type": "object", + "properties": {"tags": {"type": "array", "description": "List of tags", "items": {"type": "string"}}}, + "required": ["tags"], + }, ) result = convert_tooldef_to_chat_tool(tool_def) diff --git a/tests/unit/providers/utils/inference/test_openai_compat.py b/tests/unit/providers/utils/inference/test_openai_compat.py index ddc70e102..c200c4395 100644 --- a/tests/unit/providers/utils/inference/test_openai_compat.py +++ 
b/tests/unit/providers/utils/inference/test_openai_compat.py @@ -41,9 +41,7 @@ async def test_convert_message_to_openai_dict(): async def test_convert_message_to_openai_dict_with_tool_call(): message = CompletionMessage( content="", - tool_calls=[ - ToolCall(call_id="123", tool_name="test_tool", arguments_json='{"foo": "bar"}', arguments={"foo": "bar"}) - ], + tool_calls=[ToolCall(call_id="123", tool_name="test_tool", arguments='{"foo": "bar"}')], stop_reason=StopReason.end_of_turn, ) @@ -65,8 +63,7 @@ async def test_convert_message_to_openai_dict_with_builtin_tool_call(): ToolCall( call_id="123", tool_name=BuiltinTool.brave_search, - arguments_json='{"foo": "bar"}', - arguments={"foo": "bar"}, + arguments='{"foo": "bar"}', ) ], stop_reason=StopReason.end_of_turn, @@ -202,8 +199,7 @@ async def test_convert_message_to_openai_dict_new_completion_message_with_tool_c ToolCall( call_id="call_123", tool_name="get_weather", - arguments={"city": "Sligo"}, - arguments_json='{"city": "Sligo"}', + arguments='{"city": "Sligo"}', ) ], stop_reason=StopReason.end_of_turn, diff --git a/tests/unit/providers/utils/test_openai_compat_conversion.py b/tests/unit/providers/utils/test_openai_compat_conversion.py new file mode 100644 index 000000000..2681068f1 --- /dev/null +++ b/tests/unit/providers/utils/test_openai_compat_conversion.py @@ -0,0 +1,381 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +""" +Unit tests for OpenAI compatibility tool conversion. +Tests convert_tooldef_to_openai_tool with new JSON Schema approach. 
+""" + +from llama_stack.models.llama.datatypes import BuiltinTool, ToolDefinition +from llama_stack.providers.utils.inference.openai_compat import convert_tooldef_to_openai_tool + + +class TestSimpleSchemaConversion: + """Test basic schema conversions to OpenAI format.""" + + def test_simple_tool_conversion(self): + """Test conversion of simple tool with basic input schema.""" + tool = ToolDefinition( + tool_name="get_weather", + description="Get weather information", + input_schema={ + "type": "object", + "properties": {"location": {"type": "string", "description": "City name"}}, + "required": ["location"], + }, + ) + + result = convert_tooldef_to_openai_tool(tool) + + # Check OpenAI structure + assert result["type"] == "function" + assert "function" in result + + function = result["function"] + assert function["name"] == "get_weather" + assert function["description"] == "Get weather information" + + # Check parameters are passed through + assert "parameters" in function + assert function["parameters"] == tool.input_schema + assert function["parameters"]["type"] == "object" + assert "location" in function["parameters"]["properties"] + + def test_tool_without_description(self): + """Test tool conversion without description.""" + tool = ToolDefinition(tool_name="test_tool", input_schema={"type": "object", "properties": {}}) + + result = convert_tooldef_to_openai_tool(tool) + + assert result["function"]["name"] == "test_tool" + assert "description" not in result["function"] + assert "parameters" in result["function"] + + def test_builtin_tool_conversion(self): + """Test conversion of BuiltinTool enum.""" + tool = ToolDefinition( + tool_name=BuiltinTool.code_interpreter, + description="Run Python code", + input_schema={"type": "object", "properties": {"code": {"type": "string"}}}, + ) + + result = convert_tooldef_to_openai_tool(tool) + + # BuiltinTool should be converted to its value + assert result["function"]["name"] == "code_interpreter" + + +class 
TestComplexSchemaConversion: + """Test conversion of complex JSON Schema features.""" + + def test_schema_with_refs_and_defs(self): + """Test that $ref and $defs are passed through to OpenAI.""" + tool = ToolDefinition( + tool_name="book_flight", + description="Book a flight", + input_schema={ + "type": "object", + "properties": { + "flight": {"$ref": "#/$defs/FlightInfo"}, + "passengers": {"type": "array", "items": {"$ref": "#/$defs/Passenger"}}, + "payment": {"$ref": "#/$defs/Payment"}, + }, + "required": ["flight", "passengers", "payment"], + "$defs": { + "FlightInfo": { + "type": "object", + "properties": { + "from": {"type": "string", "description": "Departure airport"}, + "to": {"type": "string", "description": "Arrival airport"}, + "date": {"type": "string", "format": "date"}, + }, + "required": ["from", "to", "date"], + }, + "Passenger": { + "type": "object", + "properties": {"name": {"type": "string"}, "age": {"type": "integer", "minimum": 0}}, + "required": ["name", "age"], + }, + "Payment": { + "type": "object", + "properties": { + "method": {"type": "string", "enum": ["credit_card", "debit_card"]}, + "amount": {"type": "number", "minimum": 0}, + }, + }, + }, + }, + ) + + result = convert_tooldef_to_openai_tool(tool) + + params = result["function"]["parameters"] + + # Verify $defs are preserved + assert "$defs" in params + assert "FlightInfo" in params["$defs"] + assert "Passenger" in params["$defs"] + assert "Payment" in params["$defs"] + + # Verify $ref are preserved + assert params["properties"]["flight"]["$ref"] == "#/$defs/FlightInfo" + assert params["properties"]["passengers"]["items"]["$ref"] == "#/$defs/Passenger" + assert params["properties"]["payment"]["$ref"] == "#/$defs/Payment" + + # Verify nested schema details are preserved + assert params["$defs"]["FlightInfo"]["properties"]["date"]["format"] == "date" + assert params["$defs"]["Passenger"]["properties"]["age"]["minimum"] == 0 + assert 
params["$defs"]["Payment"]["properties"]["method"]["enum"] == ["credit_card", "debit_card"] + + def test_anyof_schema_conversion(self): + """Test conversion of anyOf schemas.""" + tool = ToolDefinition( + tool_name="flexible_input", + input_schema={ + "type": "object", + "properties": { + "contact": { + "anyOf": [ + {"type": "string", "format": "email"}, + {"type": "string", "pattern": "^\\+?[0-9]{10,15}$"}, + ], + "description": "Email or phone number", + } + }, + }, + ) + + result = convert_tooldef_to_openai_tool(tool) + + contact_schema = result["function"]["parameters"]["properties"]["contact"] + assert "anyOf" in contact_schema + assert len(contact_schema["anyOf"]) == 2 + assert contact_schema["anyOf"][0]["format"] == "email" + assert "pattern" in contact_schema["anyOf"][1] + + def test_nested_objects_conversion(self): + """Test conversion of deeply nested objects.""" + tool = ToolDefinition( + tool_name="nested_data", + input_schema={ + "type": "object", + "properties": { + "user": { + "type": "object", + "properties": { + "profile": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "settings": { + "type": "object", + "properties": {"theme": {"type": "string", "enum": ["light", "dark"]}}, + }, + }, + } + }, + } + }, + }, + ) + + result = convert_tooldef_to_openai_tool(tool) + + # Navigate deep structure + user_schema = result["function"]["parameters"]["properties"]["user"] + profile_schema = user_schema["properties"]["profile"] + settings_schema = profile_schema["properties"]["settings"] + theme_schema = settings_schema["properties"]["theme"] + + assert theme_schema["enum"] == ["light", "dark"] + + def test_array_schemas_with_constraints(self): + """Test conversion of array schemas with constraints.""" + tool = ToolDefinition( + tool_name="list_processor", + input_schema={ + "type": "object", + "properties": { + "items": { + "type": "array", + "items": { + "type": "object", + "properties": {"id": {"type": "integer"}, "name": {"type": 
"string"}}, + "required": ["id"], + }, + "minItems": 1, + "maxItems": 100, + "uniqueItems": True, + } + }, + }, + ) + + result = convert_tooldef_to_openai_tool(tool) + + items_schema = result["function"]["parameters"]["properties"]["items"] + assert items_schema["type"] == "array" + assert items_schema["minItems"] == 1 + assert items_schema["maxItems"] == 100 + assert items_schema["uniqueItems"] is True + assert items_schema["items"]["type"] == "object" + + +class TestOutputSchemaHandling: + """Test that output_schema is correctly handled (or dropped) for OpenAI.""" + + def test_output_schema_is_dropped(self): + """Test that output_schema is NOT included in OpenAI format (API limitation).""" + tool = ToolDefinition( + tool_name="calculator", + description="Perform calculation", + input_schema={"type": "object", "properties": {"x": {"type": "number"}, "y": {"type": "number"}}}, + output_schema={"type": "object", "properties": {"result": {"type": "number"}}, "required": ["result"]}, + ) + + result = convert_tooldef_to_openai_tool(tool) + + # OpenAI doesn't support output schema + assert "outputSchema" not in result["function"] + assert "responseSchema" not in result["function"] + assert "output_schema" not in result["function"] + + # But input schema should be present + assert "parameters" in result["function"] + assert result["function"]["parameters"] == tool.input_schema + + def test_only_output_schema_no_input(self): + """Test tool with only output_schema (unusual but valid).""" + tool = ToolDefinition( + tool_name="no_input_tool", + description="Tool with no inputs", + output_schema={"type": "object", "properties": {"timestamp": {"type": "string"}}}, + ) + + result = convert_tooldef_to_openai_tool(tool) + + # No parameters should be set if input_schema is None + # (or we might set an empty object schema - implementation detail) + assert "outputSchema" not in result["function"] + + +class TestEdgeCases: + """Test edge cases and error conditions.""" + + def 
test_tool_with_no_schemas(self): + """Test tool with neither input nor output schema.""" + tool = ToolDefinition(tool_name="schemaless_tool", description="Tool without schemas") + + result = convert_tooldef_to_openai_tool(tool) + + assert result["function"]["name"] == "schemaless_tool" + assert result["function"]["description"] == "Tool without schemas" + # Implementation detail: might have no parameters or empty object + + def test_empty_input_schema(self): + """Test tool with empty object schema.""" + tool = ToolDefinition(tool_name="no_params", input_schema={"type": "object", "properties": {}}) + + result = convert_tooldef_to_openai_tool(tool) + + assert result["function"]["parameters"]["type"] == "object" + assert result["function"]["parameters"]["properties"] == {} + + def test_schema_with_additional_properties(self): + """Test that additionalProperties is preserved.""" + tool = ToolDefinition( + tool_name="flexible_tool", + input_schema={ + "type": "object", + "properties": {"known_field": {"type": "string"}}, + "additionalProperties": True, + }, + ) + + result = convert_tooldef_to_openai_tool(tool) + + assert result["function"]["parameters"]["additionalProperties"] is True + + def test_schema_with_pattern_properties(self): + """Test that patternProperties is preserved.""" + tool = ToolDefinition( + tool_name="pattern_tool", + input_schema={"type": "object", "patternProperties": {"^[a-z]+$": {"type": "string"}}}, + ) + + result = convert_tooldef_to_openai_tool(tool) + + assert "patternProperties" in result["function"]["parameters"] + + def test_schema_identity(self): + """Test that converted schema is identical to input (no lossy conversion).""" + original_schema = { + "type": "object", + "properties": {"complex": {"$ref": "#/$defs/Complex"}}, + "$defs": { + "Complex": { + "type": "object", + "properties": {"nested": {"anyOf": [{"type": "string"}, {"type": "number"}]}}, + } + }, + "required": ["complex"], + "additionalProperties": False, + } + + tool = 
ToolDefinition(tool_name="test", input_schema=original_schema) + + result = convert_tooldef_to_openai_tool(tool) + + # Converted parameters should be EXACTLY the same as input + assert result["function"]["parameters"] == original_schema + + +class TestConversionConsistency: + """Test consistency across multiple conversions.""" + + def test_multiple_tools_with_shared_defs(self): + """Test converting multiple tools that could share definitions.""" + tool1 = ToolDefinition( + tool_name="tool1", + input_schema={ + "type": "object", + "properties": {"data": {"$ref": "#/$defs/Data"}}, + "$defs": {"Data": {"type": "object", "properties": {"x": {"type": "number"}}}}, + }, + ) + + tool2 = ToolDefinition( + tool_name="tool2", + input_schema={ + "type": "object", + "properties": {"info": {"$ref": "#/$defs/Data"}}, + "$defs": {"Data": {"type": "object", "properties": {"y": {"type": "string"}}}}, + }, + ) + + result1 = convert_tooldef_to_openai_tool(tool1) + result2 = convert_tooldef_to_openai_tool(tool2) + + # Each tool maintains its own $defs independently + assert result1["function"]["parameters"]["$defs"]["Data"]["properties"]["x"]["type"] == "number" + assert result2["function"]["parameters"]["$defs"]["Data"]["properties"]["y"]["type"] == "string" + + def test_conversion_is_pure(self): + """Test that conversion doesn't modify the original tool.""" + original_schema = { + "type": "object", + "properties": {"x": {"type": "string"}}, + "$defs": {"T": {"type": "number"}}, + } + + tool = ToolDefinition(tool_name="test", input_schema=original_schema.copy()) + + # Convert + convert_tooldef_to_openai_tool(tool) + + # Original tool should be unchanged + assert tool.input_schema == original_schema + assert "$defs" in tool.input_schema diff --git a/tests/unit/tools/test_tools_json_schema.py b/tests/unit/tools/test_tools_json_schema.py new file mode 100644 index 000000000..8fe3103bc --- /dev/null +++ b/tests/unit/tools/test_tools_json_schema.py @@ -0,0 +1,297 @@ +# Copyright (c) Meta 
Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +""" +Unit tests for JSON Schema-based tool definitions. +Tests the new input_schema and output_schema fields. +""" + +from pydantic import ValidationError + +from llama_stack.apis.tools import ToolDef +from llama_stack.models.llama.datatypes import BuiltinTool, ToolDefinition + + +class TestToolDefValidation: + """Test ToolDef validation with JSON Schema.""" + + def test_simple_input_schema(self): + """Test ToolDef with simple input schema.""" + tool = ToolDef( + name="get_weather", + description="Get weather information", + input_schema={ + "type": "object", + "properties": {"location": {"type": "string", "description": "City name"}}, + "required": ["location"], + }, + ) + + assert tool.name == "get_weather" + assert tool.input_schema["type"] == "object" + assert "location" in tool.input_schema["properties"] + assert tool.output_schema is None + + def test_input_and_output_schema(self): + """Test ToolDef with both input and output schemas.""" + tool = ToolDef( + name="calculate", + description="Perform calculation", + input_schema={ + "type": "object", + "properties": {"x": {"type": "number"}, "y": {"type": "number"}}, + "required": ["x", "y"], + }, + output_schema={"type": "object", "properties": {"result": {"type": "number"}}, "required": ["result"]}, + ) + + assert tool.input_schema is not None + assert tool.output_schema is not None + assert "result" in tool.output_schema["properties"] + + def test_schema_with_refs_and_defs(self): + """Test that $ref and $defs are preserved in schemas.""" + tool = ToolDef( + name="book_flight", + description="Book a flight", + input_schema={ + "type": "object", + "properties": { + "flight": {"$ref": "#/$defs/FlightInfo"}, + "passengers": {"type": "array", "items": {"$ref": "#/$defs/Passenger"}}, + }, + "$defs": { + "FlightInfo": { + "type": 
"object", + "properties": {"from": {"type": "string"}, "to": {"type": "string"}}, + }, + "Passenger": { + "type": "object", + "properties": {"name": {"type": "string"}, "age": {"type": "integer"}}, + }, + }, + }, + ) + + # Verify $defs are preserved + assert "$defs" in tool.input_schema + assert "FlightInfo" in tool.input_schema["$defs"] + assert "Passenger" in tool.input_schema["$defs"] + + # Verify $ref are preserved + assert tool.input_schema["properties"]["flight"]["$ref"] == "#/$defs/FlightInfo" + assert tool.input_schema["properties"]["passengers"]["items"]["$ref"] == "#/$defs/Passenger" + + def test_output_schema_with_refs(self): + """Test that output_schema also supports $ref and $defs.""" + tool = ToolDef( + name="search", + description="Search for items", + input_schema={"type": "object", "properties": {"query": {"type": "string"}}}, + output_schema={ + "type": "object", + "properties": {"results": {"type": "array", "items": {"$ref": "#/$defs/SearchResult"}}}, + "$defs": { + "SearchResult": { + "type": "object", + "properties": {"title": {"type": "string"}, "score": {"type": "number"}}, + } + }, + }, + ) + + assert "$defs" in tool.output_schema + assert "SearchResult" in tool.output_schema["$defs"] + + def test_complex_json_schema_features(self): + """Test various JSON Schema features are preserved.""" + tool = ToolDef( + name="complex_tool", + description="Tool with complex schema", + input_schema={ + "type": "object", + "properties": { + # anyOf + "contact": { + "anyOf": [ + {"type": "string", "format": "email"}, + {"type": "string", "pattern": "^\\+?[0-9]{10,15}$"}, + ] + }, + # enum + "status": {"type": "string", "enum": ["pending", "approved", "rejected"]}, + # nested objects + "address": { + "type": "object", + "properties": { + "street": {"type": "string"}, + "city": {"type": "string"}, + "zipcode": {"type": "string", "pattern": "^[0-9]{5}$"}, + }, + "required": ["street", "city"], + }, + # array with constraints + "tags": { + "type": "array", + 
"items": {"type": "string"}, + "minItems": 1, + "maxItems": 10, + "uniqueItems": True, + }, + }, + }, + ) + + # Verify anyOf + assert "anyOf" in tool.input_schema["properties"]["contact"] + + # Verify enum + assert tool.input_schema["properties"]["status"]["enum"] == ["pending", "approved", "rejected"] + + # Verify nested object + assert tool.input_schema["properties"]["address"]["type"] == "object" + assert "zipcode" in tool.input_schema["properties"]["address"]["properties"] + + # Verify array constraints + tags_schema = tool.input_schema["properties"]["tags"] + assert tags_schema["minItems"] == 1 + assert tags_schema["maxItems"] == 10 + assert tags_schema["uniqueItems"] is True + + def test_invalid_json_schema_raises_error(self): + """Test that invalid JSON Schema raises validation error.""" + # TODO: This test will pass once we add schema validation + # For now, Pydantic accepts any dict, so this is a placeholder + + # This should eventually raise an error due to invalid schema + try: + ToolDef( + name="bad_tool", + input_schema={ + "type": "invalid_type", # Not a valid JSON Schema type + "properties": "not_an_object", # Should be an object + }, + ) + # For now this passes, but shouldn't after we add validation + except ValidationError: + pass # Expected once validation is added + + +class TestToolDefinitionValidation: + """Test ToolDefinition (internal) validation with JSON Schema.""" + + def test_simple_tool_definition(self): + """Test ToolDefinition with simple schema.""" + tool = ToolDefinition( + tool_name="get_time", + description="Get current time", + input_schema={"type": "object", "properties": {"timezone": {"type": "string"}}}, + ) + + assert tool.tool_name == "get_time" + assert tool.input_schema is not None + + def test_builtin_tool_with_schema(self): + """Test ToolDefinition with BuiltinTool enum.""" + tool = ToolDefinition( + tool_name=BuiltinTool.code_interpreter, + description="Run Python code", + input_schema={"type": "object", "properties": 
{"code": {"type": "string"}}, "required": ["code"]}, + output_schema={"type": "object", "properties": {"output": {"type": "string"}, "error": {"type": "string"}}}, + ) + + assert isinstance(tool.tool_name, BuiltinTool) + assert tool.input_schema is not None + assert tool.output_schema is not None + + def test_tool_definition_with_refs(self): + """Test ToolDefinition preserves $ref/$defs.""" + tool = ToolDefinition( + tool_name="process_data", + input_schema={ + "type": "object", + "properties": {"data": {"$ref": "#/$defs/DataObject"}}, + "$defs": { + "DataObject": { + "type": "object", + "properties": { + "id": {"type": "integer"}, + "values": {"type": "array", "items": {"type": "number"}}, + }, + } + }, + }, + ) + + assert "$defs" in tool.input_schema + assert tool.input_schema["properties"]["data"]["$ref"] == "#/$defs/DataObject" + + +class TestSchemaEquivalence: + """Test that schemas remain unchanged through serialization.""" + + def test_schema_roundtrip(self): + """Test that schemas survive model_dump/model_validate roundtrip.""" + original = ToolDef( + name="test", + input_schema={ + "type": "object", + "properties": {"x": {"$ref": "#/$defs/X"}}, + "$defs": {"X": {"type": "string"}}, + }, + ) + + # Serialize and deserialize + dumped = original.model_dump() + restored = ToolDef(**dumped) + + # Schemas should be identical + assert restored.input_schema == original.input_schema + assert "$defs" in restored.input_schema + assert restored.input_schema["properties"]["x"]["$ref"] == "#/$defs/X" + + def test_json_serialization(self): + """Test JSON serialization preserves schema.""" + import json + + tool = ToolDef( + name="test", + input_schema={ + "type": "object", + "properties": {"a": {"type": "string"}}, + "$defs": {"T": {"type": "number"}}, + }, + output_schema={"type": "object", "properties": {"b": {"$ref": "#/$defs/T"}}}, + ) + + # Serialize to JSON and back + json_str = tool.model_dump_json() + parsed = json.loads(json_str) + restored = ToolDef(**parsed) + 
+ assert restored.input_schema == tool.input_schema + assert restored.output_schema == tool.output_schema + assert "$defs" in restored.input_schema + + +class TestBackwardsCompatibility: + """Test handling of legacy code patterns.""" + + def test_none_schemas(self): + """Test tools with no schemas (legacy case).""" + tool = ToolDef(name="legacy_tool", description="Tool without schemas", input_schema=None, output_schema=None) + + assert tool.input_schema is None + assert tool.output_schema is None + + def test_metadata_preserved(self): + """Test that metadata field still works.""" + tool = ToolDef( + name="test", input_schema={"type": "object"}, metadata={"endpoint": "http://example.com", "version": "1.0"} + ) + + assert tool.metadata["endpoint"] == "http://example.com" + assert tool.metadata["version"] == "1.0" From cf422da8253b023be532ce9e9e183ef1c32fc599 Mon Sep 17 00:00:00 2001 From: ehhuang Date: Thu, 2 Oct 2025 16:01:08 -0700 Subject: [PATCH 12/13] fix: responses <> chat completion input conversion (#3645) # What does this PR do? closes #3268 closes #3498 When resuming from previous response ID, currently we attempt to convert from the stored responses input to chat completion messages, which is not always possible, e.g. for tool calls where some data is lost once converted from chat completion message to responses input format. This PR stores the chat completion messages that correspond to the _last_ call to chat completion, which is sufficient to be resumed from in the next responses API call, where we load these saved messages and skip conversion entirely. 
Separate issue to optimize storage: https://github.com/llamastack/llama-stack/issues/3646 ## Test Plan existing CI tests --- llama_stack/apis/agents/openai_responses.py | 4 ++ .../responses/openai_responses.py | 70 +++++++++++++------ .../meta_reference/responses/streaming.py | 5 ++ .../utils/responses/responses_store.py | 47 +++++++++---- .../responses/test_tool_responses.py | 64 +++++++++++++++++ .../meta_reference/test_openai_responses.py | 43 +++++++----- .../utils/responses/test_responses_store.py | 27 +++++-- 7 files changed, 202 insertions(+), 58 deletions(-) diff --git a/llama_stack/apis/agents/openai_responses.py b/llama_stack/apis/agents/openai_responses.py index 190e35fd0..0f3511ea3 100644 --- a/llama_stack/apis/agents/openai_responses.py +++ b/llama_stack/apis/agents/openai_responses.py @@ -888,6 +888,10 @@ class OpenAIResponseObjectWithInput(OpenAIResponseObject): input: list[OpenAIResponseInput] + def to_response_object(self) -> OpenAIResponseObject: + """Convert to OpenAIResponseObject by excluding input field.""" + return OpenAIResponseObject(**{k: v for k, v in self.model_dump().items() if k != "input"}) + @json_schema_type class ListOpenAIResponseObject(BaseModel): diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py index 1a6d75710..352be3ded 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/openai_responses.py @@ -8,7 +8,7 @@ import time import uuid from collections.abc import AsyncIterator -from pydantic import BaseModel +from pydantic import BaseModel, TypeAdapter from llama_stack.apis.agents import Order from llama_stack.apis.agents.openai_responses import ( @@ -26,12 +26,16 @@ from llama_stack.apis.agents.openai_responses import ( ) from llama_stack.apis.inference import ( Inference, + OpenAIMessageParam, 
OpenAISystemMessageParam, ) from llama_stack.apis.tools import ToolGroups, ToolRuntime from llama_stack.apis.vector_io import VectorIO from llama_stack.log import get_logger -from llama_stack.providers.utils.responses.responses_store import ResponsesStore +from llama_stack.providers.utils.responses.responses_store import ( + ResponsesStore, + _OpenAIResponseObjectWithInputAndMessages, +) from .streaming import StreamingResponseOrchestrator from .tool_executor import ToolExecutor @@ -72,26 +76,48 @@ class OpenAIResponsesImpl: async def _prepend_previous_response( self, input: str | list[OpenAIResponseInput], - previous_response_id: str | None = None, + previous_response: _OpenAIResponseObjectWithInputAndMessages, ): + new_input_items = previous_response.input.copy() + new_input_items.extend(previous_response.output) + + if isinstance(input, str): + new_input_items.append(OpenAIResponseMessage(content=input, role="user")) + else: + new_input_items.extend(input) + + return new_input_items + + async def _process_input_with_previous_response( + self, + input: str | list[OpenAIResponseInput], + previous_response_id: str | None, + ) -> tuple[str | list[OpenAIResponseInput], list[OpenAIMessageParam]]: + """Process input with optional previous response context. 
+ + Returns: + tuple: (all_input for storage, messages for chat completion) + """ if previous_response_id: - previous_response_with_input = await self.responses_store.get_response_object(previous_response_id) + previous_response: _OpenAIResponseObjectWithInputAndMessages = ( + await self.responses_store.get_response_object(previous_response_id) + ) + all_input = await self._prepend_previous_response(input, previous_response) - # previous response input items - new_input_items = previous_response_with_input.input - - # previous response output items - new_input_items.extend(previous_response_with_input.output) - - # new input items from the current request - if isinstance(input, str): - new_input_items.append(OpenAIResponseMessage(content=input, role="user")) + if previous_response.messages: + # Use stored messages directly and convert only new input + message_adapter = TypeAdapter(list[OpenAIMessageParam]) + messages = message_adapter.validate_python(previous_response.messages) + new_messages = await convert_response_input_to_chat_messages(input) + messages.extend(new_messages) else: - new_input_items.extend(input) + # Backward compatibility: reconstruct from inputs + messages = await convert_response_input_to_chat_messages(all_input) + else: + all_input = input + messages = await convert_response_input_to_chat_messages(input) - input = new_input_items - - return input + return all_input, messages async def _prepend_instructions(self, messages, instructions): if instructions: @@ -102,7 +128,7 @@ class OpenAIResponsesImpl: response_id: str, ) -> OpenAIResponseObject: response_with_input = await self.responses_store.get_response_object(response_id) - return OpenAIResponseObject(**{k: v for k, v in response_with_input.model_dump().items() if k != "input"}) + return response_with_input.to_response_object() async def list_openai_responses( self, @@ -138,6 +164,7 @@ class OpenAIResponsesImpl: self, response: OpenAIResponseObject, input: str | list[OpenAIResponseInput], + 
messages: list[OpenAIMessageParam], ) -> None: new_input_id = f"msg_{uuid.uuid4()}" if isinstance(input, str): @@ -165,6 +192,7 @@ class OpenAIResponsesImpl: await self.responses_store.store_response_object( response_object=response, input=input_items_data, + messages=messages, ) async def create_openai_response( @@ -224,8 +252,7 @@ class OpenAIResponsesImpl: max_infer_iters: int | None = 10, ) -> AsyncIterator[OpenAIResponseObjectStream]: # Input preprocessing - input = await self._prepend_previous_response(input, previous_response_id) - messages = await convert_response_input_to_chat_messages(input) + all_input, messages = await self._process_input_with_previous_response(input, previous_response_id) await self._prepend_instructions(messages, instructions) # Structured outputs @@ -265,7 +292,8 @@ class OpenAIResponsesImpl: if store and final_response: await self._store_response( response=final_response, - input=input, + input=all_input, + messages=orchestrator.final_messages, ) async def delete_openai_response(self, response_id: str) -> OpenAIDeleteResponseObject: diff --git a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py index 732ad708e..0bb524f5c 100644 --- a/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py +++ b/llama_stack/providers/inline/agents/meta_reference/responses/streaming.py @@ -43,6 +43,7 @@ from llama_stack.apis.inference import ( OpenAIChatCompletion, OpenAIChatCompletionToolCall, OpenAIChoice, + OpenAIMessageParam, ) from llama_stack.log import get_logger @@ -94,6 +95,8 @@ class StreamingResponseOrchestrator: self.sequence_number = 0 # Store MCP tool mapping that gets built during tool processing self.mcp_tool_to_server: dict[str, OpenAIResponseInputToolMCP] = {} + # Track final messages after all tool executions + self.final_messages: list[OpenAIMessageParam] = [] async def create_response(self) -> 
AsyncIterator[OpenAIResponseObjectStream]: # Initialize output messages @@ -183,6 +186,8 @@ class StreamingResponseOrchestrator: messages = next_turn_messages + self.final_messages = messages.copy() + [current_response.choices[0].message] + # Create final response final_response = OpenAIResponseObject( created_at=self.created_at, diff --git a/llama_stack/providers/utils/responses/responses_store.py b/llama_stack/providers/utils/responses/responses_store.py index cb665b88e..e610a1ba2 100644 --- a/llama_stack/providers/utils/responses/responses_store.py +++ b/llama_stack/providers/utils/responses/responses_store.py @@ -17,6 +17,7 @@ from llama_stack.apis.agents.openai_responses import ( OpenAIResponseObject, OpenAIResponseObjectWithInput, ) +from llama_stack.apis.inference import OpenAIMessageParam from llama_stack.core.datatypes import AccessRule, ResponsesStoreConfig from llama_stack.core.utils.config_dirs import RUNTIME_BASE_DIR from llama_stack.log import get_logger @@ -28,6 +29,19 @@ from ..sqlstore.sqlstore import SqliteSqlStoreConfig, SqlStoreConfig, SqlStoreTy logger = get_logger(name=__name__, category="openai_responses") +class _OpenAIResponseObjectWithInputAndMessages(OpenAIResponseObjectWithInput): + """Internal class for storing responses with chat completion messages. + + This extends the public OpenAIResponseObjectWithInput with messages field + for internal storage. The messages field is not exposed in the public API. + + The messages field is optional for backward compatibility with responses + stored before this feature was added. 
+ """ + + messages: list[OpenAIMessageParam] | None = None + + class ResponsesStore: def __init__( self, @@ -54,7 +68,9 @@ class ResponsesStore: self.enable_write_queue = self.sql_store_config.type != SqlStoreType.sqlite # Async write queue and worker control - self._queue: asyncio.Queue[tuple[OpenAIResponseObject, list[OpenAIResponseInput]]] | None = None + self._queue: ( + asyncio.Queue[tuple[OpenAIResponseObject, list[OpenAIResponseInput], list[OpenAIMessageParam]]] | None + ) = None self._worker_tasks: list[asyncio.Task[Any]] = [] self._max_write_queue_size: int = config.max_write_queue_size self._num_writers: int = max(1, config.num_writers) @@ -100,18 +116,21 @@ class ResponsesStore: await self._queue.join() async def store_response_object( - self, response_object: OpenAIResponseObject, input: list[OpenAIResponseInput] + self, + response_object: OpenAIResponseObject, + input: list[OpenAIResponseInput], + messages: list[OpenAIMessageParam], ) -> None: if self.enable_write_queue: if self._queue is None: raise ValueError("Responses store is not initialized") try: - self._queue.put_nowait((response_object, input)) + self._queue.put_nowait((response_object, input, messages)) except asyncio.QueueFull: logger.warning(f"Write queue full; adding response id={getattr(response_object, 'id', '')}") - await self._queue.put((response_object, input)) + await self._queue.put((response_object, input, messages)) else: - await self._write_response_object(response_object, input) + await self._write_response_object(response_object, input, messages) async def _worker_loop(self) -> None: assert self._queue is not None @@ -120,22 +139,26 @@ class ResponsesStore: item = await self._queue.get() except asyncio.CancelledError: break - response_object, input = item + response_object, input, messages = item try: - await self._write_response_object(response_object, input) + await self._write_response_object(response_object, input, messages) except Exception as e: # noqa: BLE001 
logger.error(f"Error writing response object: {e}") finally: self._queue.task_done() async def _write_response_object( - self, response_object: OpenAIResponseObject, input: list[OpenAIResponseInput] + self, + response_object: OpenAIResponseObject, + input: list[OpenAIResponseInput], + messages: list[OpenAIMessageParam], ) -> None: if self.sql_store is None: raise ValueError("Responses store is not initialized") data = response_object.model_dump() data["input"] = [input_item.model_dump() for input_item in input] + data["messages"] = [msg.model_dump() for msg in messages] await self.sql_store.insert( "openai_responses", @@ -188,7 +211,7 @@ class ResponsesStore: last_id=data[-1].id if data else "", ) - async def get_response_object(self, response_id: str) -> OpenAIResponseObjectWithInput: + async def get_response_object(self, response_id: str) -> _OpenAIResponseObjectWithInputAndMessages: """ Get a response object with automatic access control checking. """ @@ -205,7 +228,7 @@ class ResponsesStore: # This provides security by not revealing whether the record exists raise ValueError(f"Response with id {response_id} not found") from None - return OpenAIResponseObjectWithInput(**row["response_object"]) + return _OpenAIResponseObjectWithInputAndMessages(**row["response_object"]) async def delete_response_object(self, response_id: str) -> OpenAIDeleteResponseObject: if not self.sql_store: @@ -241,8 +264,8 @@ class ResponsesStore: if before and after: raise ValueError("Cannot specify both 'before' and 'after' parameters") - response_with_input = await self.get_response_object(response_id) - items = response_with_input.input + response_with_input_and_messages = await self.get_response_object(response_id) + items = response_with_input_and_messages.input if order == Order.desc: items = list(reversed(items)) diff --git a/tests/integration/responses/test_tool_responses.py b/tests/integration/responses/test_tool_responses.py index f23734892..5d6899fa6 100644 --- 
a/tests/integration/responses/test_tool_responses.py +++ b/tests/integration/responses/test_tool_responses.py @@ -127,6 +127,70 @@ def test_response_non_streaming_file_search_empty_vector_store(compat_client, te assert response.output_text +def test_response_sequential_file_search(compat_client, text_model_id, tmp_path): + """Test file search with sequential responses using previous_response_id.""" + if isinstance(compat_client, LlamaStackAsLibraryClient): + pytest.skip("Responses API file search is not yet supported in library client.") + + vector_store = new_vector_store(compat_client, "test_vector_store") + + # Create a test file with content + file_content = "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture." + file_name = "test_sequential_file_search.txt" + file_path = tmp_path / file_name + file_path.write_text(file_content) + + file_response = upload_file(compat_client, file_name, file_path) + + # Attach the file to the vector store + compat_client.vector_stores.files.create( + vector_store_id=vector_store.id, + file_id=file_response.id, + ) + + # Wait for the file to be attached + wait_for_file_attachment(compat_client, vector_store.id, file_response.id) + + tools = [{"type": "file_search", "vector_store_ids": [vector_store.id]}] + + # First response request with file search + response = compat_client.responses.create( + model=text_model_id, + input="How many experts does the Llama 4 Maverick model have?", + tools=tools, + stream=False, + include=["file_search_call.results"], + ) + + # Verify the file_search_tool was called + assert len(response.output) > 1 + assert response.output[0].type == "file_search_call" + assert response.output[0].status == "completed" + assert response.output[0].queries + assert response.output[0].results + assert "128" in response.output_text or "experts" in response.output_text.lower() + + # Second response request using previous_response_id + response2 = compat_client.responses.create( + 
model=text_model_id, + input="Can you tell me more about the architecture?", + tools=tools, + stream=False, + previous_response_id=response.id, + include=["file_search_call.results"], + ) + + # Verify the second response has output + assert len(response2.output) >= 1 + assert response2.output_text + + # The second response should maintain context from the first + final_message = [output for output in response2.output if output.type == "message"] + assert len(final_message) >= 1 + assert final_message[-1].role == "assistant" + assert final_message[-1].status == "completed" + + @pytest.mark.parametrize("case", mcp_tool_test_cases) def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case): if not isinstance(compat_client, LlamaStackAsLibraryClient): diff --git a/tests/unit/providers/agents/meta_reference/test_openai_responses.py b/tests/unit/providers/agents/meta_reference/test_openai_responses.py index 0b2e6ab82..f2b29c1f7 100644 --- a/tests/unit/providers/agents/meta_reference/test_openai_responses.py +++ b/tests/unit/providers/agents/meta_reference/test_openai_responses.py @@ -22,7 +22,6 @@ from llama_stack.apis.agents.openai_responses import ( OpenAIResponseInputToolFunction, OpenAIResponseInputToolWebSearch, OpenAIResponseMessage, - OpenAIResponseObjectWithInput, OpenAIResponseOutputMessageContentOutputText, OpenAIResponseOutputMessageMCPCall, OpenAIResponseOutputMessageWebSearchToolCall, @@ -45,7 +44,10 @@ from llama_stack.core.datatypes import ResponsesStoreConfig from llama_stack.providers.inline.agents.meta_reference.responses.openai_responses import ( OpenAIResponsesImpl, ) -from llama_stack.providers.utils.responses.responses_store import ResponsesStore +from llama_stack.providers.utils.responses.responses_store import ( + ResponsesStore, + _OpenAIResponseObjectWithInputAndMessages, +) from llama_stack.providers.utils.sqlstore.sqlstore import SqliteSqlStoreConfig from tests.unit.providers.agents.meta_reference.fixtures import 
load_chat_completion_fixture @@ -499,13 +501,6 @@ async def test_create_openai_response_with_multiple_messages(openai_responses_im assert isinstance(inference_messages[i], OpenAIDeveloperMessageParam) -async def test_prepend_previous_response_none(openai_responses_impl): - """Test prepending no previous response to a new response.""" - - input = await openai_responses_impl._prepend_previous_response("fake_input", None) - assert input == "fake_input" - - async def test_prepend_previous_response_basic(openai_responses_impl, mock_responses_store): """Test prepending a basic previous response to a new response.""" @@ -520,7 +515,7 @@ async def test_prepend_previous_response_basic(openai_responses_impl, mock_respo status="completed", role="assistant", ) - previous_response = OpenAIResponseObjectWithInput( + previous_response = _OpenAIResponseObjectWithInputAndMessages( created_at=1, id="resp_123", model="fake_model", @@ -528,10 +523,11 @@ async def test_prepend_previous_response_basic(openai_responses_impl, mock_respo status="completed", text=OpenAIResponseText(format=OpenAIResponseTextFormat(type="text")), input=[input_item_message], + messages=[OpenAIUserMessageParam(content="fake_previous_input")], ) mock_responses_store.get_response_object.return_value = previous_response - input = await openai_responses_impl._prepend_previous_response("fake_input", "resp_123") + input = await openai_responses_impl._prepend_previous_response("fake_input", previous_response) assert len(input) == 3 # Check for previous input @@ -562,7 +558,7 @@ async def test_prepend_previous_response_web_search(openai_responses_impl, mock_ status="completed", role="assistant", ) - response = OpenAIResponseObjectWithInput( + response = _OpenAIResponseObjectWithInputAndMessages( created_at=1, id="resp_123", model="fake_model", @@ -570,11 +566,12 @@ async def test_prepend_previous_response_web_search(openai_responses_impl, mock_ status="completed", 
text=OpenAIResponseText(format=OpenAIResponseTextFormat(type="text")), input=[input_item_message], + messages=[OpenAIUserMessageParam(content="test input")], ) mock_responses_store.get_response_object.return_value = response input_messages = [OpenAIResponseMessage(content="fake_input", role="user")] - input = await openai_responses_impl._prepend_previous_response(input_messages, "resp_123") + input = await openai_responses_impl._prepend_previous_response(input_messages, response) assert len(input) == 4 # Check for previous input @@ -609,7 +606,7 @@ async def test_prepend_previous_response_mcp_tool_call(openai_responses_impl, mo status="completed", role="assistant", ) - response = OpenAIResponseObjectWithInput( + response = _OpenAIResponseObjectWithInputAndMessages( created_at=1, id="resp_123", model="fake_model", @@ -617,11 +614,12 @@ async def test_prepend_previous_response_mcp_tool_call(openai_responses_impl, mo status="completed", text=OpenAIResponseText(format=OpenAIResponseTextFormat(type="text")), input=[input_item_message], + messages=[OpenAIUserMessageParam(content="test input")], ) mock_responses_store.get_response_object.return_value = response input_messages = [OpenAIResponseMessage(content="fake_input", role="user")] - input = await openai_responses_impl._prepend_previous_response(input_messages, "resp_123") + input = await openai_responses_impl._prepend_previous_response(input_messages, response) assert len(input) == 4 # Check for previous input @@ -725,7 +723,7 @@ async def test_create_openai_response_with_instructions_and_previous_response( status="completed", role="assistant", ) - response = OpenAIResponseObjectWithInput( + response = _OpenAIResponseObjectWithInputAndMessages( created_at=1, id="resp_123", model="fake_model", @@ -733,6 +731,10 @@ async def test_create_openai_response_with_instructions_and_previous_response( status="completed", text=OpenAIResponseText(format=OpenAIResponseTextFormat(type="text")), input=[input_item_message], + 
messages=[ + OpenAIUserMessageParam(content="Name some towns in Ireland"), + OpenAIAssistantMessageParam(content="Galway, Longford, Sligo"), + ], ) mock_responses_store.get_response_object.return_value = response @@ -818,7 +820,7 @@ async def test_responses_store_list_input_items_logic(): OpenAIResponseMessage(id="msg_4", content="Fourth message", role="user"), ] - response_with_input = OpenAIResponseObjectWithInput( + response_with_input = _OpenAIResponseObjectWithInputAndMessages( id="resp_123", model="test_model", created_at=1234567890, @@ -827,6 +829,7 @@ async def test_responses_store_list_input_items_logic(): output=[], text=OpenAIResponseText(format=(OpenAIResponseTextFormat(type="text"))), input=input_items, + messages=[OpenAIUserMessageParam(content="First message")], ) # Mock the get_response_object method to return our test data @@ -887,7 +890,7 @@ async def test_store_response_uses_rehydrated_input_with_previous_response( rather than just the original input when previous_response_id is provided.""" # Setup - Create a previous response that should be included in the stored input - previous_response = OpenAIResponseObjectWithInput( + previous_response = _OpenAIResponseObjectWithInputAndMessages( id="resp-previous-123", object="response", created_at=1234567890, @@ -906,6 +909,10 @@ async def test_store_response_uses_rehydrated_input_with_previous_response( content=[OpenAIResponseOutputMessageContentOutputText(text="2+2 equals 4.")], ) ], + messages=[ + OpenAIUserMessageParam(content="What is 2+2?"), + OpenAIAssistantMessageParam(content="2+2 equals 4."), + ], ) mock_responses_store.get_response_object.return_value = previous_response diff --git a/tests/unit/utils/responses/test_responses_store.py b/tests/unit/utils/responses/test_responses_store.py index 4e5256c1b..c27b5a8e5 100644 --- a/tests/unit/utils/responses/test_responses_store.py +++ b/tests/unit/utils/responses/test_responses_store.py @@ -14,6 +14,7 @@ from llama_stack.apis.agents.openai_responses 
import ( OpenAIResponseInput, OpenAIResponseObject, ) +from llama_stack.apis.inference import OpenAIMessageParam, OpenAIUserMessageParam from llama_stack.providers.utils.responses.responses_store import ResponsesStore from llama_stack.providers.utils.sqlstore.sqlstore import SqliteSqlStoreConfig @@ -44,6 +45,11 @@ def create_test_response_input(content: str, input_id: str) -> OpenAIResponseInp ) +def create_test_messages(content: str) -> list[OpenAIMessageParam]: + """Helper to create test messages for chat completion.""" + return [OpenAIUserMessageParam(content=content)] + + async def test_responses_store_pagination_basic(): """Test basic pagination functionality for responses store.""" with TemporaryDirectory() as tmp_dir: @@ -65,7 +71,8 @@ async def test_responses_store_pagination_basic(): for response_id, timestamp in test_data: response = create_test_response_object(response_id, timestamp) input_list = [create_test_response_input(f"Input for {response_id}", f"input-{response_id}")] - await store.store_response_object(response, input_list) + messages = create_test_messages(f"Input for {response_id}") + await store.store_response_object(response, input_list, messages) # Wait for all queued writes to complete await store.flush() @@ -111,7 +118,8 @@ async def test_responses_store_pagination_ascending(): for response_id, timestamp in test_data: response = create_test_response_object(response_id, timestamp) input_list = [create_test_response_input(f"Input for {response_id}", f"input-{response_id}")] - await store.store_response_object(response, input_list) + messages = create_test_messages(f"Input for {response_id}") + await store.store_response_object(response, input_list, messages) # Wait for all queued writes to complete await store.flush() @@ -149,7 +157,8 @@ async def test_responses_store_pagination_with_model_filter(): for response_id, timestamp, model in test_data: response = create_test_response_object(response_id, timestamp, model) input_list = 
[create_test_response_input(f"Input for {response_id}", f"input-{response_id}")] - await store.store_response_object(response, input_list) + messages = create_test_messages(f"Input for {response_id}") + await store.store_response_object(response, input_list, messages) # Wait for all queued writes to complete await store.flush() @@ -199,7 +208,8 @@ async def test_responses_store_pagination_no_limit(): for response_id, timestamp in test_data: response = create_test_response_object(response_id, timestamp) input_list = [create_test_response_input(f"Input for {response_id}", f"input-{response_id}")] - await store.store_response_object(response, input_list) + messages = create_test_messages(f"Input for {response_id}") + await store.store_response_object(response, input_list, messages) # Wait for all queued writes to complete await store.flush() @@ -222,7 +232,8 @@ async def test_responses_store_get_response_object(): # Store a test response response = create_test_response_object("test-resp", int(time.time())) input_list = [create_test_response_input("Test input content", "input-test-resp")] - await store.store_response_object(response, input_list) + messages = create_test_messages("Test input content") + await store.store_response_object(response, input_list, messages) # Wait for all queued writes to complete await store.flush() @@ -255,7 +266,8 @@ async def test_responses_store_input_items_pagination(): create_test_response_input("Fourth input", "input-4"), create_test_response_input("Fifth input", "input-5"), ] - await store.store_response_object(response, input_list) + messages = create_test_messages("First input") + await store.store_response_object(response, input_list, messages) # Wait for all queued writes to complete await store.flush() @@ -335,7 +347,8 @@ async def test_responses_store_input_items_before_pagination(): create_test_response_input("Fourth input", "before-4"), create_test_response_input("Fifth input", "before-5"), ] - await 
store.store_response_object(response, input_list) + messages = create_test_messages("First input") + await store.store_response_object(response, input_list, messages) # Wait for all queued writes to complete await store.flush() From 873a4005440a6459d6297a110ff0edd13a9ae205 Mon Sep 17 00:00:00 2001 From: Matthew Farrellee Date: Fri, 3 Oct 2025 00:32:02 -0400 Subject: [PATCH 13/13] chore: OpenAIMixin implements ModelsProtocolPrivate (#3662) # What does this PR do? add ModelsProtocolPrivate methods to OpenAIMixin this will allow providers using OpenAIMixin to use a common interface ## Test Plan ci w/ new tests --- .../remote/inference/cerebras/cerebras.py | 4 - .../remote/inference/fireworks/fireworks.py | 2 +- .../providers/remote/inference/tgi/tgi.py | 2 - .../remote/inference/together/together.py | 2 +- .../providers/utils/inference/openai_mixin.py | 29 ++++- .../recordings/responses/39576bcd7ed6.json | 57 +++++++++ .../recordings/responses/53d2488c9ea9.json | 40 ++++++ .../utils/inference/test_openai_mixin.py | 118 ++++++++++++++++++ 8 files changed, 243 insertions(+), 11 deletions(-) create mode 100644 tests/integration/recordings/responses/39576bcd7ed6.json create mode 100644 tests/integration/recordings/responses/53d2488c9ea9.json diff --git a/llama_stack/providers/remote/inference/cerebras/cerebras.py b/llama_stack/providers/remote/inference/cerebras/cerebras.py index 95da71de8..43b984f7f 100644 --- a/llama_stack/providers/remote/inference/cerebras/cerebras.py +++ b/llama_stack/providers/remote/inference/cerebras/cerebras.py @@ -25,9 +25,6 @@ from llama_stack.apis.inference import ( ToolPromptFormat, TopKSamplingStrategy, ) -from llama_stack.providers.utils.inference.model_registry import ( - ModelRegistryHelper, -) from llama_stack.providers.utils.inference.openai_compat import ( get_sampling_options, process_chat_completion_response, @@ -44,7 +41,6 @@ from .config import CerebrasImplConfig class CerebrasInferenceAdapter( OpenAIMixin, - ModelRegistryHelper, 
Inference, ): def __init__(self, config: CerebrasImplConfig) -> None: diff --git a/llama_stack/providers/remote/inference/fireworks/fireworks.py b/llama_stack/providers/remote/inference/fireworks/fireworks.py index dcc9e240b..83d9ac354 100644 --- a/llama_stack/providers/remote/inference/fireworks/fireworks.py +++ b/llama_stack/providers/remote/inference/fireworks/fireworks.py @@ -44,7 +44,7 @@ from .config import FireworksImplConfig logger = get_logger(name=__name__, category="inference::fireworks") -class FireworksInferenceAdapter(OpenAIMixin, ModelRegistryHelper, Inference, NeedsRequestProviderData): +class FireworksInferenceAdapter(OpenAIMixin, Inference, NeedsRequestProviderData): embedding_model_metadata = { "nomic-ai/nomic-embed-text-v1.5": {"embedding_dimension": 768, "context_length": 8192}, "accounts/fireworks/models/qwen3-embedding-8b": {"embedding_dimension": 4096, "context_length": 40960}, diff --git a/llama_stack/providers/remote/inference/tgi/tgi.py b/llama_stack/providers/remote/inference/tgi/tgi.py index 27fc263a6..703ee2c1b 100644 --- a/llama_stack/providers/remote/inference/tgi/tgi.py +++ b/llama_stack/providers/remote/inference/tgi/tgi.py @@ -29,7 +29,6 @@ from llama_stack.apis.models import Model from llama_stack.apis.models.models import ModelType from llama_stack.log import get_logger from llama_stack.models.llama.sku_list import all_registered_models -from llama_stack.providers.datatypes import ModelsProtocolPrivate from llama_stack.providers.utils.inference.model_registry import ( ModelRegistryHelper, build_hf_repo_model_entry, @@ -65,7 +64,6 @@ def build_hf_repo_model_entries(): class _HfAdapter( OpenAIMixin, Inference, - ModelsProtocolPrivate, ): url: str api_key: SecretStr diff --git a/llama_stack/providers/remote/inference/together/together.py b/llama_stack/providers/remote/inference/together/together.py index 0c8363f6a..1f7a92d69 100644 --- a/llama_stack/providers/remote/inference/together/together.py +++ 
b/llama_stack/providers/remote/inference/together/together.py @@ -47,7 +47,7 @@ from .config import TogetherImplConfig logger = get_logger(name=__name__, category="inference::together") -class TogetherInferenceAdapter(OpenAIMixin, ModelRegistryHelper, Inference, NeedsRequestProviderData): +class TogetherInferenceAdapter(OpenAIMixin, Inference, NeedsRequestProviderData): embedding_model_metadata = { "togethercomputer/m2-bert-80M-32k-retrieval": {"embedding_dimension": 768, "context_length": 32768}, "BAAI/bge-large-en-v1.5": {"embedding_dimension": 1024, "context_length": 512}, diff --git a/llama_stack/providers/utils/inference/openai_mixin.py b/llama_stack/providers/utils/inference/openai_mixin.py index 3ff7d5cc6..4354b067e 100644 --- a/llama_stack/providers/utils/inference/openai_mixin.py +++ b/llama_stack/providers/utils/inference/openai_mixin.py @@ -26,14 +26,14 @@ from llama_stack.apis.inference import ( from llama_stack.apis.models import ModelType from llama_stack.core.request_headers import NeedsRequestProviderData from llama_stack.log import get_logger -from llama_stack.providers.utils.inference.model_registry import ModelRegistryHelper +from llama_stack.providers.datatypes import ModelsProtocolPrivate from llama_stack.providers.utils.inference.openai_compat import prepare_openai_completion_params from llama_stack.providers.utils.inference.prompt_adapter import localize_image_content logger = get_logger(name=__name__, category="providers::utils") -class OpenAIMixin(ModelRegistryHelper, NeedsRequestProviderData, ABC): +class OpenAIMixin(ModelsProtocolPrivate, NeedsRequestProviderData, ABC): """ Mixin class that provides OpenAI-specific functionality for inference providers. This class handles direct OpenAI API calls using the AsyncOpenAI client. 
@@ -73,6 +73,9 @@ class OpenAIMixin(ModelRegistryHelper, NeedsRequestProviderData, ABC): # Optional field name in provider data to look for API key, which takes precedence provider_data_api_key_field: str | None = None + # automatically set by the resolver when instantiating the provider + __provider_id__: str + @abstractmethod def get_api_key(self) -> str: """ @@ -356,6 +359,24 @@ class OpenAIMixin(ModelRegistryHelper, NeedsRequestProviderData, ABC): usage=usage, ) + ### + # ModelsProtocolPrivate implementation - provide model management functionality + # + # async def register_model(self, model: Model) -> Model: ... + # async def unregister_model(self, model_id: str) -> None: ... + # + # async def list_models(self) -> list[Model] | None: ... + # async def should_refresh_models(self) -> bool: ... + ## + + async def register_model(self, model: Model) -> Model: + if not await self.check_model_availability(model.provider_model_id): + raise ValueError(f"Model {model.provider_model_id} is not available from provider {self.__provider_id__}") + return model + + async def unregister_model(self, model_id: str) -> None: + return None + async def list_models(self) -> list[Model] | None: """ List available models from the provider's /v1/models endpoint augmented with static embedding model metadata. 
@@ -400,5 +421,8 @@ class OpenAIMixin(ModelRegistryHelper, NeedsRequestProviderData, ABC): """ if not self._model_cache: await self.list_models() return model in self._model_cache + + async def should_refresh_models(self) -> bool: + return False diff --git a/tests/integration/recordings/responses/39576bcd7ed6.json b/tests/integration/recordings/responses/39576bcd7ed6.json new file mode 100644 index 000000000..77c8cf15c --- /dev/null +++ b/tests/integration/recordings/responses/39576bcd7ed6.json @@ -0,0 +1,57 @@ +{ + "request": { + "method": "POST", + "url": "http://0.0.0.0:11434/v1/v1/chat/completions", + "headers": {}, + "body": { + "model": "llama-guard3:1b", + "messages": [ + { + "role": "user", + "content": "Task: Check if there is unsafe content in 'Assistant' messages in conversations according our safety policy with the below categories.\n\n\nS1: Violent Crimes.\nS2: Non-Violent Crimes.\nS3: Sex Crimes.\nS4: Child Exploitation.\nS5: Defamation.\nS6: Specialized Advice.\nS7: Privacy.\nS8: Intellectual Property.\nS9: Indiscriminate Weapons.\nS10: Hate.\nS11: Self-Harm.\nS12: Sexual Content.\nS13: Elections.\nS14: Code Interpreter Abuse.\n\n\n\n\nUser: Give me a sentence that contains the word: hello\n\nAssistant: \n\n\n\nProvide your safety assessment for ONLY THE LAST Assistant message in the above conversation:\n - First line must read 'safe' or 'unsafe'.\n - If unsafe, a second line must include a comma-separated list of violated categories."
+ } + ], + "stream": false, + "temperature": 0.0 + }, + "endpoint": "/v1/chat/completions", + "model": "llama-guard3:1b" + }, + "response": { + "body": { + "__type__": "openai.types.chat.chat_completion.ChatCompletion", + "__data__": { + "id": "chatcmpl-317", + "choices": [ + { + "finish_reason": "stop", + "index": 0, + "logprobs": null, + "message": { + "content": "safe", + "refusal": null, + "role": "assistant", + "annotations": null, + "audio": null, + "function_call": null, + "tool_calls": null + } + } + ], + "created": 1759351124, + "model": "llama-guard3:1b", + "object": "chat.completion", + "service_tier": null, + "system_fingerprint": "fp_ollama", + "usage": { + "completion_tokens": 2, + "prompt_tokens": 397, + "total_tokens": 399, + "completion_tokens_details": null, + "prompt_tokens_details": null + } + } + }, + "is_streaming": false + } +} diff --git a/tests/integration/recordings/responses/53d2488c9ea9.json b/tests/integration/recordings/responses/53d2488c9ea9.json new file mode 100644 index 000000000..6b63536f5 --- /dev/null +++ b/tests/integration/recordings/responses/53d2488c9ea9.json @@ -0,0 +1,40 @@ +{ + "request": { + "method": "POST", + "url": "http://localhost:11434/api/generate", + "headers": {}, + "body": { + "model": "llama3.2:3b-instruct-fp16", + "options": { + "temperature": 0.0001, + "top_p": 0.9 + }, + "stream": true + }, + "endpoint": "/api/generate", + "model": "llama3.2:3b-instruct-fp16" + }, + "response": { + "body": [ + { + "__type__": "ollama._types.GenerateResponse", + "__data__": { + "model": "llama3.2:3b-instruct-fp16", + "created_at": "2025-10-01T20:38:48.732564955Z", + "done": true, + "done_reason": "load", + "total_duration": null, + "load_duration": null, + "prompt_eval_count": null, + "prompt_eval_duration": null, + "eval_count": null, + "eval_duration": null, + "response": "", + "thinking": null, + "context": null + } + } + ], + "is_streaming": true + } +} diff --git 
a/tests/unit/providers/utils/inference/test_openai_mixin.py b/tests/unit/providers/utils/inference/test_openai_mixin.py index 8ef7ec81c..4856f510b 100644 --- a/tests/unit/providers/utils/inference/test_openai_mixin.py +++ b/tests/unit/providers/utils/inference/test_openai_mixin.py @@ -362,6 +362,124 @@ class TestOpenAIMixinAllowedModels: assert not await mixin.check_model_availability("another-mock-model-id") +class TestOpenAIMixinModelRegistration: + """Test cases for model registration functionality""" + + async def test_register_model_success(self, mixin, mock_client_with_models, mock_client_context): + """Test successful model registration when model is available""" + model = Model( + provider_id="test-provider", + provider_resource_id="some-mock-model-id", + identifier="test-model", + model_type=ModelType.llm, + ) + + with mock_client_context(mixin, mock_client_with_models): + result = await mixin.register_model(model) + + assert result == model + assert result.provider_id == "test-provider" + assert result.provider_resource_id == "some-mock-model-id" + assert result.identifier == "test-model" + assert result.model_type == ModelType.llm + mock_client_with_models.models.list.assert_called_once() + + async def test_register_model_not_available(self, mixin, mock_client_with_models, mock_client_context): + """Test model registration failure when model is not available from provider""" + model = Model( + provider_id="test-provider", + provider_resource_id="non-existent-model", + identifier="test-model", + model_type=ModelType.llm, + ) + + with mock_client_context(mixin, mock_client_with_models): + with pytest.raises( + ValueError, match="Model non-existent-model is not available from provider test-provider" + ): + await mixin.register_model(model) + mock_client_with_models.models.list.assert_called_once() + + async def test_register_model_with_allowed_models_filter(self, mixin, mock_client_with_models, mock_client_context): + """Test model registration with 
allowed_models filtering""" + mixin.allowed_models = {"some-mock-model-id"} + + # Test with allowed model + allowed_model = Model( + provider_id="test-provider", + provider_resource_id="some-mock-model-id", + identifier="allowed-model", + model_type=ModelType.llm, + ) + + # Test with disallowed model + disallowed_model = Model( + provider_id="test-provider", + provider_resource_id="final-mock-model-id", + identifier="disallowed-model", + model_type=ModelType.llm, + ) + + with mock_client_context(mixin, mock_client_with_models): + result = await mixin.register_model(allowed_model) + assert result == allowed_model + with pytest.raises( + ValueError, match="Model final-mock-model-id is not available from provider test-provider" + ): + await mixin.register_model(disallowed_model) + mock_client_with_models.models.list.assert_called_once() + + async def test_register_embedding_model(self, mixin_with_embeddings, mock_client_context): + """Test registration of embedding models with metadata""" + mock_embedding_model = MagicMock(id="text-embedding-3-small") + mock_models = [mock_embedding_model] + + mock_client = MagicMock() + + async def mock_models_list(): + for model in mock_models: + yield model + + mock_client.models.list.return_value = mock_models_list() + + embedding_model = Model( + provider_id="test-provider", + provider_resource_id="text-embedding-3-small", + identifier="embedding-test", + model_type=ModelType.embedding, + ) + + with mock_client_context(mixin_with_embeddings, mock_client): + result = await mixin_with_embeddings.register_model(embedding_model) + assert result == embedding_model + assert result.model_type == ModelType.embedding + + async def test_unregister_model(self, mixin): + """Test model unregistration (should be no-op)""" + # unregister_model should not raise any exceptions and return None + result = await mixin.unregister_model("any-model-id") + assert result is None + + async def test_should_refresh_models(self, mixin): + """Test 
should_refresh_models method (should always return False)""" + result = await mixin.should_refresh_models() + assert result is False + + async def test_register_model_error_propagation(self, mixin, mock_client_with_exception, mock_client_context): + """Test that errors from provider API are properly propagated during registration""" + model = Model( + provider_id="test-provider", + provider_resource_id="some-model", + identifier="test-model", + model_type=ModelType.llm, + ) + + with mock_client_context(mixin, mock_client_with_exception): + # The exception from the API should be propagated + with pytest.raises(Exception, match="API Error"): + await mixin.register_model(model) + + class ProviderDataValidator(BaseModel): """Validator for provider data in tests"""