Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-16 22:09:27 +00:00)
feat: Making static prompt values in Rag/File Search configurable in Vector Store Config (#4368)
# What does this PR do?
- Enables users to configure the prompts used throughout File Search / Vector Retrieval.
- The configuration lives in the Vector Stores Config, so prompts can be modified at runtime.
- Backwards compatible: the new fields are optional and default to the previously hard-coded values.

This is a summary of the new options in `run.yaml`:
```yaml
vector_stores:
  file_search_params:
    header_template: 'knowledge_search tool found {num_chunks} chunks:\nBEGIN of knowledge_search tool results.\n'
    footer_template: 'END of knowledge_search tool results.\n'
  context_prompt_params:
    chunk_annotation_template: 'Result {index}\nContent: {chunk.content}\nMetadata: {metadata}\n'
    context_template: 'The above results were retrieved to help answer the user\'s query: "{query}". Use them as supporting information only in answering this query.{annotation_instruction}\n'
  annotation_prompt_params:
    enable_annotations: true
    annotation_instruction_template: 'Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like \'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.\'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.'
    chunk_annotation_template: '[{index}] {metadata_text} cite as <|{file_id}|>\n{chunk_text}\n'
```
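To make the effect of these templates concrete, here is a minimal, illustrative sketch of how such placeholder templates are filled in with plain `str.format` at retrieval time; the chunk data below is made up and it does not call any llama-stack APIs:

```python
# Minimal sketch: rendering the configurable templates with str.format.
# The chunk data below is made up for illustration.
header_template = "knowledge_search tool found {num_chunks} chunks:\nBEGIN of knowledge_search tool results.\n"
footer_template = "END of knowledge_search tool results.\n"
chunk_template = "[{index}] {metadata_text} cite as <|{file_id}|>\n{chunk_text}\n"

chunks = [
    {
        "file_id": "file-abc123",
        "metadata_text": "document_id: file-abc123",
        "chunk_text": "Llama 4 Maverick has 128 experts",
    },
]

parts = [header_template.format(num_chunks=len(chunks))]
for i, c in enumerate(chunks):
    parts.append(chunk_template.format(index=i + 1, **c))
parts.append(footer_template)
print("".join(parts))
```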
## Test Plan
Added tests.
---------
Signed-off-by: Francisco Javier Arceo <farceo@redhat.com>
This commit is contained in: parent 4043dedeea, commit 62005dc1a9
47 changed files with 42640 additions and 40 deletions
@ -15,6 +15,7 @@ Meta's reference implementation of an agent system that can use tools, access ve
| Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------|
| `persistence` | `AgentPersistenceConfig` | No | | |
| `vector_stores_config` | `VectorStoresConfig \| None` | No | | Configuration for vector store prompt templates and behavior |

## Sample Configuration
@ -10,6 +10,12 @@ title: inline::rag-runtime
RAG (Retrieval-Augmented Generation) tool runtime for document ingestion, chunking, and semantic search.

## Configuration

| Field | Type | Required | Default | Description |
|-------|------|----------|---------|-------------|
| `vector_stores_config` | `VectorStoresConfig` | No | `default_provider_id=None default_embedding_model=None rewrite_query_params=None file_search_params=FileSearchParams(header_template='knowledge_search tool found {num_chunks} chunks:\nBEGIN of knowledge_search tool results.\n', footer_template='END of knowledge_search tool results.\n') context_prompt_params=ContextPromptParams(chunk_annotation_template='Result {index}\nContent: {chunk.content}\nMetadata: {metadata}\n', context_template='The above results were retrieved to help answer the user\'s query: "{query}". Use them as supporting information only in answering this query.{annotation_instruction}\n') annotation_prompt_params=AnnotationPromptParams(enable_annotations=True, annotation_instruction_template=" Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.", chunk_annotation_template='[{index}] {metadata_text} cite as <|{file_id}|>\n{chunk_text}\n')` | Configuration for vector store prompt templates and behavior |

## Sample Configuration
@ -18,7 +18,15 @@ from llama_stack.core.storage.datatypes import (
    StorageConfig,
)
from llama_stack.log import LoggingConfig
from llama_stack.providers.utils.memory.constants import DEFAULT_QUERY_REWRITE_PROMPT
from llama_stack.providers.utils.memory.constants import (
    DEFAULT_ANNOTATION_INSTRUCTION_TEMPLATE,
    DEFAULT_CHUNK_ANNOTATION_TEMPLATE,
    DEFAULT_CHUNK_WITH_SOURCES_TEMPLATE,
    DEFAULT_CONTEXT_TEMPLATE,
    DEFAULT_FILE_SEARCH_FOOTER_TEMPLATE,
    DEFAULT_FILE_SEARCH_HEADER_TEMPLATE,
    DEFAULT_QUERY_REWRITE_PROMPT,
)
from llama_stack_api import (
    Api,
    Benchmark,
@ -371,6 +379,125 @@ class RewriteQueryParams(BaseModel):
        description="Temperature for query expansion model (0.0 = deterministic, 1.0 = creative).",
    )

    @field_validator("prompt")
    @classmethod
    def validate_prompt(cls, v: str) -> str:
        if "{query}" not in v:
            raise ValueError("prompt must contain {query} placeholder")
        return v

    @field_validator("max_tokens")
    @classmethod
    def validate_max_tokens(cls, v: int) -> int:
        if v <= 0:
            raise ValueError("max_tokens must be positive")
        if v > 4096:
            raise ValueError("max_tokens should not exceed 4096")
        return v

    @field_validator("temperature")
    @classmethod
    def validate_temperature(cls, v: float) -> float:
        if v < 0.0 or v > 2.0:
            raise ValueError("temperature must be between 0.0 and 2.0")
        return v


class FileSearchParams(BaseModel):
    """Configuration for file search tool output formatting."""

    header_template: str = Field(
        default=DEFAULT_FILE_SEARCH_HEADER_TEMPLATE,
        description="Template for the header text shown before search results. Available placeholders: {num_chunks} number of chunks found.",
    )
    footer_template: str = Field(
        default=DEFAULT_FILE_SEARCH_FOOTER_TEMPLATE,
        description="Template for the footer text shown after search results.",
    )

    @field_validator("header_template")
    @classmethod
    def validate_header_template(cls, v: str) -> str:
        if len(v) == 0:
            raise ValueError("header_template must not be empty")
        if "{num_chunks}" not in v:
            raise ValueError("header_template must contain {num_chunks} placeholder")
        if "knowledge_search" not in v.lower():
            raise ValueError(
                "header_template must contain 'knowledge_search' keyword to ensure proper tool identification"
            )
        return v


class ContextPromptParams(BaseModel):
    """Configuration for LLM prompt content and chunk formatting."""

    chunk_annotation_template: str = Field(
        default=DEFAULT_CHUNK_ANNOTATION_TEMPLATE,
        description="Template for formatting individual chunks in search results. Available placeholders: {index} 1-based chunk index, {chunk.content} chunk content, {metadata} chunk metadata dict.",
    )
    context_template: str = Field(
        default=DEFAULT_CONTEXT_TEMPLATE,
        description="Template for explaining the search results to the model. Available placeholders: {query} user's query, {num_chunks} number of chunks.",
    )

    @field_validator("chunk_annotation_template")
    @classmethod
    def validate_chunk_annotation_template(cls, v: str) -> str:
        if len(v) == 0:
            raise ValueError("chunk_annotation_template must not be empty")
        if "{chunk.content}" not in v:
            raise ValueError("chunk_annotation_template must contain {chunk.content} placeholder")
        if "{index}" not in v:
            raise ValueError("chunk_annotation_template must contain {index} placeholder")
        return v

    @field_validator("context_template")
    @classmethod
    def validate_context_template(cls, v: str) -> str:
        if len(v) == 0:
            raise ValueError("context_template must not be empty")
        if "{query}" not in v:
            raise ValueError("context_template must contain {query} placeholder")
        return v


class AnnotationPromptParams(BaseModel):
    """Configuration for source annotation and attribution features."""

    enable_annotations: bool = Field(
        default=True,
        description="Whether to include annotation information in results.",
    )
    annotation_instruction_template: str = Field(
        default=DEFAULT_ANNOTATION_INSTRUCTION_TEMPLATE,
        description="Instructions for how the model should cite sources. Used when enable_annotations is True.",
    )
    chunk_annotation_template: str = Field(
        default=DEFAULT_CHUNK_WITH_SOURCES_TEMPLATE,
        description="Template for chunks with annotation information. Available placeholders: {index} 1-based chunk index, {metadata_text} formatted metadata, {file_id} document identifier, {chunk_text} chunk content.",
    )

    @field_validator("chunk_annotation_template")
    @classmethod
    def validate_chunk_annotation_template(cls, v: str) -> str:
        if len(v) == 0:
            raise ValueError("chunk_annotation_template must not be empty")
        if "{index}" not in v:
            raise ValueError("chunk_annotation_template must contain {index} placeholder")
        if "{chunk_text}" not in v:
            raise ValueError("chunk_annotation_template must contain {chunk_text} placeholder")
        if "{file_id}" not in v:
            raise ValueError("chunk_annotation_template must contain {file_id} placeholder")
        return v

    @field_validator("annotation_instruction_template")
    @classmethod
    def validate_annotation_instruction_template(cls, v: str) -> str:
        if len(v) == 0:
            raise ValueError("annotation_instruction_template must not be empty")
        return v


class VectorStoresConfig(BaseModel):
    """Configuration for vector stores in the stack."""
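As a quick illustration of the validators above, here is a small, hypothetical snippet showing that a header template missing its required placeholder is rejected; it assumes `FileSearchParams` is importable from `llama_stack.core.datatypes` alongside `VectorStoresConfig`:

```python
# Hypothetical usage sketch: the field validators above reject templates that
# are missing required placeholders. Assumes FileSearchParams is exported from
# llama_stack.core.datatypes (the module that imports the new default constants).
from pydantic import ValidationError

from llama_stack.core.datatypes import FileSearchParams

try:
    # Missing {num_chunks} (and the 'knowledge_search' keyword).
    FileSearchParams(header_template="Here are some results:\n")
except ValidationError as exc:
    print(exc)  # header_template must contain {num_chunks} placeholder

# The defaults need no arguments and pass validation unchanged.
print(FileSearchParams().footer_template)  # END of knowledge_search tool results.
```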
@ -387,6 +514,18 @@ class VectorStoresConfig(BaseModel):
        default=None,
        description="Parameters for query rewriting/expansion. None disables query rewriting.",
    )
    file_search_params: FileSearchParams = Field(
        default_factory=FileSearchParams,
        description="Configuration for file search tool output formatting.",
    )
    context_prompt_params: ContextPromptParams = Field(
        default_factory=ContextPromptParams,
        description="Configuration for LLM prompt content and chunk formatting.",
    )
    annotation_prompt_params: AnnotationPromptParams = Field(
        default_factory=AnnotationPromptParams,
        description="Configuration for source annotation and attribution features.",
    )


class SafetyConfig(BaseModel):
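Because every new field has a default or a `default_factory`, a partial override keeps the remaining prompts at their previous values. A small, hypothetical sketch, again assuming these models are exported from `llama_stack.core.datatypes`:

```python
# Hypothetical sketch: overriding only one template. All other fields keep
# their defaults, which is what makes the change backwards compatible.
from llama_stack.core.datatypes import FileSearchParams, VectorStoresConfig

cfg = VectorStoresConfig(
    file_search_params=FileSearchParams(
        header_template="knowledge_search tool found {num_chunks} chunks:\nSTART of results.\n",
    )
)
print(cfg.file_search_params.footer_template)            # default footer, unchanged
print(cfg.annotation_prompt_params.enable_annotations)   # True (default)
```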
@ -3,6 +3,7 @@
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import importlib
import importlib.metadata
import inspect
@ -406,13 +407,17 @@ async def instantiate_provider(
        args = [provider_spec.api, inner_impls, deps, dist_registry, policy]
    else:
        method = "get_provider_impl"
        provider_config = provider.config.copy()

        # Inject vector_stores_config for providers that need it (introspection-based)
        config_type = instantiate_class_type(provider_spec.config_class)
        config = config_type(**provider.config)
        if hasattr(config_type, "__fields__") and "vector_stores_config" in config_type.__fields__:
            provider_config["vector_stores_config"] = run_config.vector_stores

        config = config_type(**provider_config)
        args = [config, deps]
        if "policy" in inspect.signature(getattr(module, method)).parameters:
            args.append(policy)

        fn = getattr(module, method)
        impl = await fn(*args)
        impl.__provider_id__ = provider.provider_id
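The introspection-based injection above can be exercised in isolation. A minimal, hypothetical sketch of the same "does this config class declare `vector_stores_config`?" check; the `WithVSC`/`WithoutVSC` models are stand-ins rather than real provider configs, and the sketch uses pydantic v2's `model_fields` where the diff uses the older `__fields__` alias:

```python
# Minimal sketch of the introspection idea above, using stand-in pydantic models.
from pydantic import BaseModel


class WithVSC(BaseModel):
    vector_stores_config: dict | None = None


class WithoutVSC(BaseModel):
    url: str = "http://localhost"


def accepts_vector_stores_config(config_cls: type[BaseModel]) -> bool:
    # Only inject the stack-level setting if the provider's config model declares the field.
    return "vector_stores_config" in config_cls.model_fields


print(accepts_vector_stores_config(WithVSC))     # True
print(accepts_vector_stores_config(WithoutVSC))  # False
```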
@ -229,8 +229,6 @@ async def validate_vector_stores_config(vector_stores_config: VectorStoresConfig
    if vector_stores_config.rewrite_query_params:
        if vector_stores_config.rewrite_query_params.model:
            await _validate_rewrite_query_model(vector_stores_config.rewrite_query_params.model, impls)
        if "{query}" not in vector_stores_config.rewrite_query_params.prompt:
            raise ValueError("'{query}' placeholder is required in the prompt template")


async def _validate_embedding_model(embedding_model: QualifiedModel, impls: dict[Api, Any]) -> None:
@ -277,5 +277,38 @@ vector_stores:
  default_embedding_model:
    provider_id: sentence-transformers
    model_id: nomic-ai/nomic-embed-text-v1.5
  file_search_params:
    header_template: 'knowledge_search tool found {num_chunks} chunks:

      BEGIN of knowledge_search tool results.

      '
    footer_template: 'END of knowledge_search tool results.

      '
  context_prompt_params:
    chunk_annotation_template: 'Result {index}

      Content: {chunk.content}

      Metadata: {metadata}

      '
    context_template: 'The above results were retrieved to help answer the user''s
      query: "{query}". Use them as supporting information only in answering this
      query.{annotation_instruction}

      '
  annotation_prompt_params:
    enable_annotations: true
    annotation_instruction_template: ' Cite sources immediately at the end of sentences
      before punctuation, using `<|file-id|>` format like ''This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.''.
      Do not add extra punctuation. Use only the file IDs provided, do not invent
      new ones.'
    chunk_annotation_template: '[{index}] {metadata_text} cite as <|{file_id}|>

      {chunk_text}

      '
safety:
  default_shield_id: llama-guard
@ -78,6 +78,7 @@ class MetaReferenceAgentsImpl(Agents):
            conversations_api=self.conversations_api,
            prompts_api=self.prompts_api,
            files_api=self.files_api,
            vector_stores_config=self.config.vector_stores_config,
        )

    async def shutdown(self) -> None:
@ -6,8 +6,9 @@
from typing import Any

from pydantic import BaseModel
from pydantic import BaseModel, Field

from llama_stack.core.datatypes import VectorStoresConfig
from llama_stack.core.storage.datatypes import KVStoreReference, ResponsesStoreReference
@ -20,6 +21,10 @@ class AgentPersistenceConfig(BaseModel):
class MetaReferenceAgentsImplConfig(BaseModel):
    persistence: AgentPersistenceConfig
    vector_stores_config: VectorStoresConfig | None = Field(
        default=None,
        description="Configuration for vector store prompt templates and behavior",
    )

    @classmethod
    def sample_run_config(cls, __distro_dir__: str) -> dict[str, Any]:
@ -80,6 +80,7 @@ class OpenAIResponsesImpl:
        conversations_api: Conversations,
        prompts_api: Prompts,
        files_api: Files,
        vector_stores_config=None,
    ):
        self.inference_api = inference_api
        self.tool_groups_api = tool_groups_api
@ -92,6 +93,7 @@ class OpenAIResponsesImpl:
            tool_groups_api=tool_groups_api,
            tool_runtime_api=tool_runtime_api,
            vector_io_api=vector_io_api,
            vector_stores_config=vector_stores_config,
        )
        self.prompts_api = prompts_api
        self.files_api = files_api
@ -12,6 +12,10 @@ from typing import Any
from opentelemetry import trace

from llama_stack.log import get_logger
from llama_stack.providers.utils.memory.constants import (
    DEFAULT_ANNOTATION_INSTRUCTION_TEMPLATE,
    DEFAULT_CHUNK_WITH_SOURCES_TEMPLATE,
)
from llama_stack_api import (
    ImageContentItem,
    OpenAIChatCompletionContentPartImageParam,
@ -52,10 +56,12 @@ class ToolExecutor:
        tool_groups_api: ToolGroups,
        tool_runtime_api: ToolRuntime,
        vector_io_api: VectorIO,
        vector_stores_config=None,
    ):
        self.tool_groups_api = tool_groups_api
        self.tool_runtime_api = tool_runtime_api
        self.vector_io_api = vector_io_api
        self.vector_stores_config = vector_stores_config

    async def execute_tool_call(
        self,
@ -148,13 +154,33 @@ class ToolExecutor:
        for results in all_results:
            search_results.extend(results)

        content_items = []
        content_items.append(
            TextContentItem(
                text=f"knowledge_search tool found {len(search_results)} chunks:\nBEGIN of knowledge_search tool results.\n"
            )
        # Get templates from vector stores config, fallback to constants

        # Check if annotations are enabled
        enable_annotations = (
            self.vector_stores_config
            and self.vector_stores_config.annotation_prompt_params
            and self.vector_stores_config.annotation_prompt_params.enable_annotations
        )

        # Get templates
        header_template = self.vector_stores_config.file_search_params.header_template
        footer_template = self.vector_stores_config.file_search_params.footer_template
        context_template = self.vector_stores_config.context_prompt_params.context_template

        # Get annotation templates (use defaults if annotations disabled)
        if enable_annotations:
            chunk_annotation_template = self.vector_stores_config.annotation_prompt_params.chunk_annotation_template
            annotation_instruction_template = (
                self.vector_stores_config.annotation_prompt_params.annotation_instruction_template
            )
        else:
            chunk_annotation_template = DEFAULT_CHUNK_WITH_SOURCES_TEMPLATE
            annotation_instruction_template = DEFAULT_ANNOTATION_INSTRUCTION_TEMPLATE

        content_items = []
        content_items.append(TextContentItem(text=header_template.format(num_chunks=len(search_results))))

        unique_files = set()
        for i, result_item in enumerate(search_results):
            chunk_text = result_item.content[0].text if result_item.content else ""
@ -166,22 +192,23 @@ class ToolExecutor:
            if result_item.attributes:
                metadata_text += f", attributes: {result_item.attributes}"

            text_content = f"[{i + 1}] {metadata_text} (cite as <|{file_id}|>)\n{chunk_text}\n"
            text_content = chunk_annotation_template.format(
                index=i + 1, metadata_text=metadata_text, file_id=file_id, chunk_text=chunk_text
            )
            content_items.append(TextContentItem(text=text_content))
            unique_files.add(file_id)

        content_items.append(TextContentItem(text="END of knowledge_search tool results.\n"))
        content_items.append(TextContentItem(text=footer_template))

        citation_instruction = ""
        annotation_instruction = ""
        if unique_files:
            citation_instruction = (
                " Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). "
                "Do not add extra punctuation. Use only the file IDs provided (do not invent new ones)."
            )
            annotation_instruction = annotation_instruction_template

        content_items.append(
            TextContentItem(
                text=f'The above results were retrieved to help answer the user\'s query: "{query}". Use them as supporting information only in answering this query.{citation_instruction}\n',
                text=context_template.format(
                    query=query, num_chunks=len(search_results), annotation_instruction=annotation_instruction
                )
            )
        )
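For instance, annotation-style citations can be turned off per deployment by flipping `enable_annotations`. A hypothetical sketch of the resulting config object, assuming these models are exported from `llama_stack.core.datatypes` as above:

```python
# Hypothetical sketch: disabling citation annotations. With enable_annotations
# set to False, the executor above falls back to the default chunk and
# instruction templates instead of the configured annotation templates.
from llama_stack.core.datatypes import AnnotationPromptParams, VectorStoresConfig

cfg = VectorStoresConfig(
    annotation_prompt_params=AnnotationPromptParams(enable_annotations=False),
)
print(cfg.annotation_prompt_params.enable_annotations)  # False
```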
@ -6,10 +6,17 @@
from typing import Any

from pydantic import BaseModel
from pydantic import BaseModel, Field

from llama_stack.core.datatypes import VectorStoresConfig


class RagToolRuntimeConfig(BaseModel):
    vector_stores_config: VectorStoresConfig = Field(
        default_factory=VectorStoresConfig,
        description="Configuration for vector store prompt templates and behavior",
    )

    @classmethod
    def sample_run_config(cls, __distro_dir__: str, **kwargs: Any) -> dict[str, Any]:
        return {}
@ -221,11 +221,15 @@ class MemoryToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime):
        chunks = chunks[: query_config.max_chunks]

        tokens = 0
        picked: list[InterleavedContentItem] = [
            TextContentItem(
                text=f"knowledge_search tool found {len(chunks)} chunks:\nBEGIN of knowledge_search tool results.\n"
            )
        ]

        # Get templates from vector stores config
        vector_stores_config = self.config.vector_stores_config
        header_template = vector_stores_config.file_search_params.header_template
        footer_template = vector_stores_config.file_search_params.footer_template
        chunk_template = vector_stores_config.context_prompt_params.chunk_annotation_template
        context_template = vector_stores_config.context_prompt_params.context_template

        picked: list[InterleavedContentItem] = [TextContentItem(text=header_template.format(num_chunks=len(chunks)))]
        for i, chunk in enumerate(chunks):
            metadata = chunk.metadata
            tokens += metadata.get("token_count", 0)
@ -255,13 +259,13 @@ class MemoryToolRuntimeImpl(ToolGroupsProtocolPrivate, ToolRuntime):
                if k not in metadata_keys_to_exclude_from_context:
                    metadata_for_context[k] = metadata[k]

            text_content = query_config.chunk_template.format(index=i + 1, chunk=chunk, metadata=metadata_for_context)
            text_content = chunk_template.format(index=i + 1, chunk=chunk, metadata=metadata_for_context)
            picked.append(TextContentItem(text=text_content))

        picked.append(TextContentItem(text="END of knowledge_search tool results.\n"))
        picked.append(TextContentItem(text=footer_template))
        picked.append(
            TextContentItem(
                text=f'The above results were retrieved to help answer the user\'s query: "{interleaved_content_as_str(content)}". Use them as supporting information only in answering this query.\n',
                text=context_template.format(query=interleaved_content_as_str(content), annotation_instruction="")
            )
        )
@ -6,3 +6,17 @@
# Default prompt template for query rewriting in vector search
DEFAULT_QUERY_REWRITE_PROMPT = "Expand this query with relevant synonyms and related terms. Return only the improved query, no explanations:\n\n{query}\n\nImproved query:"

# Default templates for file search tool output formatting
DEFAULT_FILE_SEARCH_HEADER_TEMPLATE = (
    "knowledge_search tool found {num_chunks} chunks:\nBEGIN of knowledge_search tool results.\n"
)
DEFAULT_FILE_SEARCH_FOOTER_TEMPLATE = "END of knowledge_search tool results.\n"

# Default templates for LLM prompt content and chunk formatting
DEFAULT_CHUNK_ANNOTATION_TEMPLATE = "Result {index}\nContent: {chunk.content}\nMetadata: {metadata}\n"
DEFAULT_CONTEXT_TEMPLATE = 'The above results were retrieved to help answer the user\'s query: "{query}". Use them as supporting information only in answering this query.{annotation_instruction}\n'

# Default templates for source annotation and attribution features
DEFAULT_ANNOTATION_INSTRUCTION_TEMPLATE = " Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones."
DEFAULT_CHUNK_WITH_SOURCES_TEMPLATE = "[{index}] {metadata_text} cite as <|{file_id}|>\n{chunk_text}\n"
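A quick, self-contained illustration of how the context template above is combined with the annotation instruction (the query text below is made up; the constants are copied from this file):

```python
# Self-contained illustration of the default context/annotation templates above.
DEFAULT_CONTEXT_TEMPLATE = 'The above results were retrieved to help answer the user\'s query: "{query}". Use them as supporting information only in answering this query.{annotation_instruction}\n'
DEFAULT_ANNOTATION_INSTRUCTION_TEMPLATE = " Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones."

with_citations = DEFAULT_CONTEXT_TEMPLATE.format(
    query="How many experts does Llama 4 Maverick have?",
    annotation_instruction=DEFAULT_ANNOTATION_INSTRUCTION_TEMPLATE,
)
without_citations = DEFAULT_CONTEXT_TEMPLATE.format(
    query="How many experts does Llama 4 Maverick have?",
    annotation_instruction="",
)
print(with_citations)
print(without_citations)
```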
2152  tests/integration/responses/recordings/02b32fe03919cf862d4f97088b58b41f62662aab6d30c0af57da645edbbe84b6.json (generated, new file; diff suppressed because it is too large)
5076  tests/integration/responses/recordings/02e846d83678946e7b7dd7610496f8989621648e55c78588b033a64040266f49.json (generated, new file; diff suppressed because it is too large)
2036  tests/integration/responses/recordings/0652618307804555ec2837e1bd33023c38e865065de8ade26677251fdcb4e456.json (generated, new file; diff suppressed because it is too large)
1712  tests/integration/responses/recordings/0b45257ef8ece1b2ae98dc4f3fb078d4b5e96f823be88e4463707d499e013262.json (generated, new file; diff suppressed because it is too large)
1780  tests/integration/responses/recordings/17cf0143efe13139537802fe97b1d5e86dafd5e23479728505924440ce9b11d6.json (generated, new file; diff suppressed because it is too large)
@ -0,0 +1,631 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_FzhOmTdZThRndI5rSASPdAqr",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_FzhOmTdZThRndI5rSASPdAqr",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-797509666839, score: 0.019272319661840426, attributes: {'filename': 'test_sequential_file_search.txt', 'chunk_id': '3907d885-d8e7-a72d-1113-f7080454d97c', 'document_id': 'file-797509666839', 'token_count': 19.0, 'metadata_token_count': 11.0} cite as <|file-797509666839|>\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-797509666839|>."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Can you tell me more about the architecture?"
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
},
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_VtFoA9FzgtwNWqenbX1PKi1w",
|
||||
"function": {
|
||||
"arguments": "",
|
||||
"name": "knowledge_search"
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "xPf6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "{\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "dHP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "query",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "a"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\":\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "n"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "L",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "ibn6l"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "lama",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "Zj"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " ",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "Z1K9G"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "4",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "SvheX"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " Maver",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "ick",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "Egj"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " model",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " architecture",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "2veLpIXn5"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\"}",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "EF4"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "tool_calls",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "Sp28"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1e1717665769",
|
||||
"choices": [],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": {
|
||||
"completion_tokens": 22,
|
||||
"prompt_tokens": 400,
|
||||
"total_tokens": 422,
|
||||
"completion_tokens_details": {
|
||||
"accepted_prediction_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"reasoning_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
},
|
||||
"prompt_tokens_details": {
|
||||
"audio_tokens": 0,
|
||||
"cached_tokens": 0
|
||||
}
|
||||
},
|
||||
"obfuscation": "rszgcNSsbRR1D"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@ -0,0 +1,763 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_gZXRKN1HMDC16NP9wNPAkP9K",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model experts count\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_gZXRKN1HMDC16NP9wNPAkP9K",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-864460993305, score: 0.01141874848523339, attributes: {'filename': 'test_response_non_streaming_file_search.txt', 'chunk_id': '869ae0c0-ab85-ca6f-e5d0-024381443c27', 'document_id': 'file-864460993305', 'token_count': 10.0, 'metadata_token_count': 13.0} cite as <|file-864460993305|>\nLlama 4 Maverick has 128 experts\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model experts count\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
},
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "SvaNRTSCreICNt"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "uI5dQOp434E2B"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " L",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "S5xCfipngY5Jsu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "lama",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "U8KuOQQeWohQ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "zk2skKNLkFwn1Zw"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "jVclDXFBOVbnhCb"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Maver",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "3CuCCZARj9"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "ick",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "6pbQvXnE705dF"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " model",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "8oL0LQrbQe"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " has",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "DQaAbmXLP8wQ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "AWfDhbEuxBZaxDZ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "BestZ5aIWFH02"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " experts",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "pAbvQSXU"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " <",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "Tddmv7Un5cYy1K"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "|",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "FEEpu3yMVOhWzyv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "file",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "SJSLMJJF2hpn"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "-",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "tM9VcUkheYocNr2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "864",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "GEZX6ibeJqJDb"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "460",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "WktxNU0pSDQCp"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "993",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "n3FX4Ln5LBia0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "305",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "eS0MfdTf1582u"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "|",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "r1PjmGi346q7Ixf"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ">.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "PgIfwA3juex8OG"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "2ncsenZDLu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-256215277d28",
|
||||
"choices": [],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": {
|
||||
"completion_tokens": 23,
|
||||
"prompt_tokens": 346,
|
||||
"total_tokens": 369,
|
||||
"completion_tokens_details": {
|
||||
"accepted_prediction_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"reasoning_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
},
|
||||
"prompt_tokens_details": {
|
||||
"audio_tokens": 0,
|
||||
"cached_tokens": 0
|
||||
}
|
||||
},
|
||||
"obfuscation": "xoNg7P6WIVerb"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
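Each of these recording files captures both sides of a replayed chat-completions call: `request.body.messages` holds the user question, the assistant's `knowledge_search` tool call, and a `tool` message whose `content` is the list of text blocks built from the file-search results (header, per-chunk citation line, footer, and the citation instruction). Below is a minimal sketch, assuming a hypothetical local copy of one of these recording files, that extracts those text blocks for inspection; it is not part of the test suite, just an illustration of the recorded structure.

```python
import json

# Hypothetical path to one of the recording files added in this change.
RECORDING = "tests/integration/responses/recordings/example.json"

with open(RECORDING) as f:
    recording = json.load(f)

# Walk the recorded request messages and print the tool-result text blocks
# (header, per-chunk citation line, footer, citation instruction) in order.
for message in recording["request"]["body"]["messages"]:
    if message.get("role") == "tool":
        for block in message["content"]:
            if block.get("type") == "text":
                print(block["text"])
```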
@@ -0,0 +1,668 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_4ac6gxccWFxDvEl8BizY3BJw",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_4ac6gxccWFxDvEl8BizY3BJw",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-528246887823, score: 0.019272319661840426, attributes: {'filename': 'test_sequential_file_search.txt', 'chunk_id': '3907d885-d8e7-a72d-1113-f7080454d97c', 'document_id': 'file-528246887823', 'token_count': 19.0, 'metadata_token_count': 11.0} cite as <|file-528246887823|>\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-528246887823|>."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Can you tell me more about the architecture?"
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
},
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_de3mxedMBHueFpm2b61KzC57",
|
||||
"function": {
|
||||
"arguments": "",
|
||||
"name": "knowledge_search"
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "WnRz"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "{\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "Tdk"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "query",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "b"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\":\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "5"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "L",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "ixufu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "lama",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "JP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " ",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "vJSE0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "4",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "1ps8E"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " Maver",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "ick",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "YB0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " model",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " architecture",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "DgrymXGGS"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " details",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "rcnLxZVgmKPeIW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\"}",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "uu6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "tool_calls",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": null,
|
||||
"obfuscation": "Loyb"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-29ab0b407d3d",
|
||||
"choices": [],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_a4d13246c5",
|
||||
"usage": {
|
||||
"completion_tokens": 23,
|
||||
"prompt_tokens": 400,
|
||||
"total_tokens": 423,
|
||||
"completion_tokens_details": {
|
||||
"accepted_prediction_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"reasoning_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
},
|
||||
"prompt_tokens_details": {
|
||||
"audio_tokens": 0,
|
||||
"cached_tokens": 0
|
||||
}
|
||||
},
|
||||
"obfuscation": "3H5M4ak9Z9li3"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
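On the response side, each recording stores the raw streamed `ChatCompletionChunk` objects under `response.body`. A small sketch, again assuming a hypothetical recording path rather than any specific file from this change, that reassembles the streamed deltas into the final assistant message and checks that it carries a `<|file-...|>` citation:

```python
import json
import re

# Hypothetical path to a recording with a streamed ("is_streaming": true) response.
RECORDING = "tests/integration/responses/recordings/example.json"

with open(RECORDING) as f:
    recording = json.load(f)

# Concatenate the content deltas from every recorded chunk that has choices.
pieces = []
for chunk in recording["response"]["body"]:
    choices = chunk["__data__"]["choices"]
    if choices and choices[0]["delta"]["content"] is not None:
        pieces.append(choices[0]["delta"]["content"])

answer = "".join(pieces)
print(answer)

# The file-search tests expect citations in the <|file-...|> format.
assert re.search(r"<\|file-[^|]+\|>", answer)
```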
@@ -0,0 +1,925 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_FzhOmTdZThRndI5rSASPdAqr",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_FzhOmTdZThRndI5rSASPdAqr",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-797509666839, score: 0.019272319661840426, attributes: {'filename': 'test_sequential_file_search.txt', 'chunk_id': '3907d885-d8e7-a72d-1113-f7080454d97c', 'document_id': 'file-797509666839', 'token_count': 19.0, 'metadata_token_count': 11.0} cite as <|file-797509666839|>\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
},
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "X2XnRCilRPxgou"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "sNrUscXR9a5ZW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " L",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "lUIVgc5y4Soo2E"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "lama",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "BLLf5MHrUd3S"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "bFCci6Tu59o9tmD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "dVnRcXTaDdrpaMB"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Maver",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "3r72Ztc1P3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "ick",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "DuesapCBztIIr"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " model",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "ZUoA9UPcfp"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " has",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "bm7beCvKpP24"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "IhzX818vDFrNi78"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "lSq0KBU2Y8ZJS"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " experts",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "f9iewxru"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " in",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "nPgE2TSRe4N3i"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " its",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "nyDiyR4g3Bcu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " mixture",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "nAYDkbgt"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " of",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "ZTB7U1lyTE7gO"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " experts",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "XYAqh8Ah"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " architecture",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "Jnw"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " <",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "OtEY9P1EWChz9k"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "|",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "7bE355cFwUnztmk"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "file",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "okss4UeCv655"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "-",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "0vKQVPNH34N6JPH"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "797",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "lzhiEt8HDEE5F"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "509",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "ZvxUlUnrIYGiP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "666",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "ylx50KiWqHw95"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "839",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "WbLvgnaEKF5fe"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "|",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "dP1oyCP9PCHRKCF"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ">.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "tsyjM2ubYy8raG"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "dVYz9fwa19"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2dc865d055f3",
|
||||
"choices": [],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": {
|
||||
"completion_tokens": 29,
|
||||
"prompt_tokens": 355,
|
||||
"total_tokens": 384,
|
||||
"completion_tokens_details": {
|
||||
"accepted_prediction_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"reasoning_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
},
|
||||
"prompt_tokens_details": {
|
||||
"audio_tokens": 0,
|
||||
"cached_tokens": 0
|
||||
}
|
||||
},
|
||||
"obfuscation": "vdEJGR6vOpTQR"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
2738 tests/integration/responses/recordings/2fa72534cefd5c8baeb7e65efd1e9f6bdb8710a974804a5bc79fd8bca669464f.json (generated, new file; diff suppressed because it is too large)
1091 tests/integration/responses/recordings/3ad5c85a4df455be7cc724d789dd9b804be74e28c36069860db48e39e36b080c.json (generated, new file; diff suppressed because it is too large)
1315 tests/integration/responses/recordings/4f46489a9532e9f6c0b8457067383e9deb599d5b51001b6f4c13f32342011b26.json (generated, new file; diff suppressed because it is too large)
1060 tests/integration/responses/recordings/556bb5a135769252ddb4232a07a86a2c7c5817e07e150f92a61c84c97b5c3618.json (generated, new file; diff suppressed because it is too large)
1627 tests/integration/responses/recordings/65d67bc8aa00d27af1e9f5ca584e9037a6d27d9426a6e68cc65ff538af8d748d.json (generated, new file; diff suppressed because it is too large)
2792 tests/integration/responses/recordings/81c896a41bb1ff7107978b7f759e30a76a68f00a023ec0cf4815afeafc83fc79.json (generated, new file; diff suppressed because it is too large)
2765 tests/integration/responses/recordings/99f6a59ebd00d6fcbf93a7cde22b6ff44f001c5de5a24118d0fa51dc216e1917.json (generated, new file; diff suppressed because it is too large)
|
|
@@ -0,0 +1,925 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_4ac6gxccWFxDvEl8BizY3BJw",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_4ac6gxccWFxDvEl8BizY3BJw",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-528246887823, score: 0.019272319661840426, attributes: {'filename': 'test_sequential_file_search.txt', 'chunk_id': '3907d885-d8e7-a72d-1113-f7080454d97c', 'document_id': 'file-528246887823', 'token_count': 19.0, 'metadata_token_count': 11.0} cite as <|file-528246887823|>\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"stream_options": {
|
||||
"include_usage": true
|
||||
},
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "qxiMbOb2xONvED"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "G58v6jnZjGOYu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " L",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "fxvK2E9KzsZ4Qh"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "lama",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "KMh23BEbyBFf"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "khZLqq9dq1JVs50"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "daMaLef5moqoEc6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Maver",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "Cd9EAwdwxo"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "ick",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "I0JQB094ggRj3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " model",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "ucfxMhve2z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " has",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "m4H3KsvURIcc"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "adn7icPrZiPsOKU"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "eXJB2DbT4sjvW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " experts",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "zcFMcygQ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " in",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "nipPRlDuOEz8K"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " its",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "vnV7BYmJvj04"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " mixture",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "hrijN588"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " of",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "FcjZ5PIO2jswL"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " experts",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "k9tHak4g"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " architecture",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "Mv3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " <",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "FeQKZfXsh61kLW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "|",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "N7d96ngH25hy1DD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "file",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "X78VovNmcAxM"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "-",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "x6AspjfSKgMx1nV"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "528",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "7NZLsXRVCQOtG"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "246",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "9G7Fo1P4gaW9E"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "887",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "XHaRH6aQEwpXx"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "823",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "Ui4dA5C7hyBs3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "|",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "G3N92kgznwmVeJv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ">.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "XB0HKF8UhE71LI"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": null,
|
||||
"obfuscation": "0XQKNcPPlW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-9b13d4fa1a58",
|
||||
"choices": [],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_83554c687e",
|
||||
"usage": {
|
||||
"completion_tokens": 29,
|
||||
"prompt_tokens": 355,
|
||||
"total_tokens": 384,
|
||||
"completion_tokens_details": {
|
||||
"accepted_prediction_tokens": 0,
|
||||
"audio_tokens": 0,
|
||||
"reasoning_tokens": 0,
|
||||
"rejected_prediction_tokens": 0
|
||||
},
|
||||
"prompt_tokens_details": {
|
||||
"audio_tokens": 0,
|
||||
"cached_tokens": 0
|
||||
}
|
||||
},
|
||||
"obfuscation": "3g8L2kMn2uVi0"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
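The recording above captures the model's answer as a token-by-token stream that ends with a `<|file-id|>` citation. As a rough illustration only (not code from this PR; the helper name and the sample chunks are made up), the streamed `delta.content` pieces can be reassembled and checked for a citation marker like this:

```python
import re


def assemble(chunks: list[dict]) -> str:
    """Concatenate delta.content across streamed chat.completion.chunk payloads."""
    parts = []
    for chunk in chunks:
        for choice in chunk.get("choices", []):
            content = (choice.get("delta") or {}).get("content")
            if content:
                parts.append(content)
    return "".join(parts)


# Hypothetical mini-stream in the same shape as the recording above.
sample_chunks = [
    {"choices": [{"delta": {"content": "The Llama 4 Maverick model has 128 experts"}}]},
    {"choices": [{"delta": {"content": " <|file-528246887823|>."}}]},
    {"choices": []},  # usage-only chunk carries no delta
]

text = assemble(sample_chunks)
assert re.search(r"<\|file-[\w-]+\|>", text), text
print(text)
```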
1645 tests/integration/responses/recordings/a5a92ca10e1cf3901daeb8a2f862f4284013f6472db141a339b3e54ea62b4827.json (generated, new file; diff suppressed because it is too large)
@@ -0,0 +1,763 @@
{
  "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts]",
  "request": {
    "method": "POST",
    "url": "https://api.openai.com/v1/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "gpt-4o",
      "messages": [
        {
          "role": "user",
          "content": "How many experts does the Llama 4 Maverick model have?"
        },
        {
          "role": "assistant",
          "content": "",
          "tool_calls": [
            {
              "index": 0,
              "id": "call_UKFNZA0eSkL6fZHbs8ygBd5W",
              "type": "function",
              "function": {
                "name": "knowledge_search",
                "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
              }
            }
          ]
        },
        {
          "role": "tool",
          "tool_call_id": "call_UKFNZA0eSkL6fZHbs8ygBd5W",
          "content": [
            {
              "type": "text",
              "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
            },
            {
              "type": "text",
              "text": "[1] document_id: file-861837565219, score: 0.015252754664575433, attributes: {'filename': 'test_response_non_streaming_file_search.txt', 'chunk_id': '869ae0c0-ab85-ca6f-e5d0-024381443c27', 'document_id': 'file-861837565219', 'token_count': 10.0, 'metadata_token_count': 13.0} cite as <|file-861837565219|>\nLlama 4 Maverick has 128 experts\n"
            },
            {
              "type": "text",
              "text": "END of knowledge_search tool results.\n"
            },
            {
              "type": "text",
              "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format like 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'. Do not add extra punctuation. Use only the file IDs provided, do not invent new ones.\n"
            }
          ]
        }
      ],
      "stream": true,
      "stream_options": {
        "include_usage": true
      },
      "tools": [
        {
          "type": "function",
          "function": {
            "name": "knowledge_search",
            "description": "Search for information in a database.",
            "parameters": {
              "type": "object",
              "properties": {
                "query": {
                  "type": "string",
                  "description": "The query to search for. Can be a natural language sentence or keywords."
                }
              },
              "required": [
                "query"
              ]
            }
          }
        }
      ]
    },
    "endpoint": "/v1/chat/completions",
    "model": "gpt-4o"
  },
  "response": {
    "body": [
      (25 streamed openai.types.chat.chat_completion_chunk.ChatCompletionChunk payloads, id "rec-a85a6b4b83b5", model "gpt-4o-2024-08-06", system_fingerprint "fp_a4d13246c5"; their delta.content tokens assemble into the assistant message "The Llama 4 Maverick model has 128 experts <|file-861837565219|>.", followed by a finish_reason "stop" chunk and a final usage chunk with completion_tokens 23, prompt_tokens 348, total_tokens 371)
    ],
    "is_streaming": true
  },
  "id_normalization_mapping": {}
}
1820 tests/integration/responses/recordings/abdc464d8573d03c671266b602b8bc6f61fb809b4e648f48477ba4cd9c81a216.json (generated, new file; diff suppressed because it is too large)
1091 tests/integration/responses/recordings/eddd68f1cd5c593ed7b6cb4b4b2306df8117cf7c14b3087e9cdea23ceb8c8104.json (generated, new file; diff suppressed because it is too large)
4455 tests/integration/responses/recordings/fcff32ffee8ecdc771165eff7e82f1ad2d83aec3532ef10c5cfcab2e637ee94c.json (generated, new file; diff suppressed because it is too large)
@@ -0,0 +1,547 @@
{
  "test_id": "tests/integration/responses/test_basic_responses.py::test_include_logprobs_streaming[txt=openai/gpt-4o]",
  "request": {
    "method": "POST",
    "url": "https://generativelanguage.googleapis.com/v1beta/openai/v1/models",
    "headers": {},
    "body": {},
    "endpoint": "/v1/models",
    "model": ""
  },
  "response": {
    "body": [
      (openai.types.model.Model entries, each with "created": null, "object": "model", "owned_by": "google"; id / display_name pairs:)
      models/embedding-gecko-001 / Embedding Gecko
      models/gemini-2.5-flash / Gemini 2.5 Flash
      models/gemini-2.5-pro / Gemini 2.5 Pro
      models/gemini-2.0-flash-exp / Gemini 2.0 Flash Experimental
      models/gemini-2.0-flash / Gemini 2.0 Flash
      models/gemini-2.0-flash-001 / Gemini 2.0 Flash 001
      models/gemini-2.0-flash-exp-image-generation / Gemini 2.0 Flash (Image Generation) Experimental
      models/gemini-2.0-flash-lite-001 / Gemini 2.0 Flash-Lite 001
      models/gemini-2.0-flash-lite / Gemini 2.0 Flash-Lite
      models/gemini-2.0-flash-lite-preview-02-05 / Gemini 2.0 Flash-Lite Preview 02-05
      models/gemini-2.0-flash-lite-preview / Gemini 2.0 Flash-Lite Preview
      models/gemini-exp-1206 / Gemini Experimental 1206
      models/gemini-2.5-flash-preview-tts / Gemini 2.5 Flash Preview TTS
      models/gemini-2.5-pro-preview-tts / Gemini 2.5 Pro Preview TTS
      models/gemma-3-1b-it / Gemma 3 1B
      models/gemma-3-4b-it / Gemma 3 4B
      models/gemma-3-12b-it / Gemma 3 12B
      models/gemma-3-27b-it / Gemma 3 27B
      models/gemma-3n-e4b-it / Gemma 3n E4B
      models/gemma-3n-e2b-it / Gemma 3n E2B
      models/gemini-flash-latest / Gemini Flash Latest
      models/gemini-flash-lite-latest / Gemini Flash-Lite Latest
      models/gemini-pro-latest / Gemini Pro Latest
      models/gemini-2.5-flash-lite / Gemini 2.5 Flash-Lite
      models/gemini-2.5-flash-image-preview / Nano Banana
      models/gemini-2.5-flash-image / Nano Banana
      models/gemini-2.5-flash-preview-09-2025 / Gemini 2.5 Flash Preview Sep 2025
      models/gemini-2.5-flash-lite-preview-09-2025 / Gemini 2.5 Flash-Lite Preview Sep 2025
      models/gemini-3-pro-preview / Gemini 3 Pro Preview
      models/gemini-3-pro-image-preview / Nano Banana Pro
      models/nano-banana-pro-preview / Nano Banana Pro
      models/gemini-robotics-er-1.5-preview / Gemini Robotics-ER 1.5 Preview
      models/gemini-2.5-computer-use-preview-10-2025 / Gemini 2.5 Computer Use Preview 10-2025
      models/deep-research-pro-preview-12-2025 / Deep Research Pro Preview (Dec-12-2025)
      models/embedding-001 / Embedding 001
      models/text-embedding-004 / Text Embedding 004
      models/gemini-embedding-exp-03-07 / Gemini Embedding Experimental 03-07
      models/gemini-embedding-exp / Gemini Embedding Experimental
      models/gemini-embedding-001 / Gemini Embedding 001
      models/aqa / Model that performs Attributed Question Answering.
      models/imagen-4.0-generate-preview-06-06 / Imagen 4 (Preview)
      models/imagen-4.0-ultra-generate-preview-06-06 / Imagen 4 Ultra (Preview)
      models/imagen-4.0-generate-001 / Imagen 4
      models/imagen-4.0-ultra-generate-001 / Imagen 4 Ultra
      models/imagen-4.0-fast-generate-001 / Imagen 4 Fast
      models/veo-2.0-generate-001 / Veo 2
      models/veo-3.0-generate-001 / Veo 3
      models/veo-3.0-fast-generate-001 / Veo 3 fast
      models/veo-3.1-generate-preview / Veo 3.1
      models/veo-3.1-fast-generate-preview / Veo 3.1 fast
      models/gemini-2.5-flash-native-audio-latest / Gemini 2.5 Flash Native Audio Latest
      models/gemini-2.5-flash-native-audio-preview-09-2025 / Gemini 2.5 Flash Native Audio Preview 09-2025
      models/lyria-realtime-exp / Lyria Realtime Experimental
    ],
    "is_streaming": false
  },
  "id_normalization_mapping": {}
}
|
|
@@ -0,0 +1,547 @@
{
  "test_id": "tests/integration/responses/test_basic_responses.py::test_include_logprobs_with_function_tools[txt=openai/gpt-4o]",
  "request": {
    "method": "POST",
    "url": "https://generativelanguage.googleapis.com/v1beta/openai/v1/models",
    "headers": {},
    "body": {},
    "endpoint": "/v1/models",
    "model": ""
  },
  "response": {
    "body": [
      "... the same catalog of openai.types.model.Model entries (Gemini, Gemma 3, Imagen 4, Veo, Lyria, and the Google embedding models, all owned_by google) as recorded above ..."
    ],
    "is_streaming": false
  },
  "id_normalization_mapping": {}
}
@@ -0,0 +1,547 @@
{
  "test_id": "tests/integration/responses/test_basic_responses.py::test_include_logprobs_with_web_search[txt=openai/gpt-4o]",
  "request": {
    "method": "POST",
    "url": "https://generativelanguage.googleapis.com/v1beta/openai/v1/models",
    "headers": {},
    "body": {},
    "endpoint": "/v1/models",
    "model": ""
  },
  "response": {
    "body": [
      "... the same catalog of openai.types.model.Model entries (Gemini, Gemma 3, Imagen 4, Veo, Lyria, and the Google embedding models, all owned_by google) as recorded above ..."
    ],
    "is_streaming": false
  },
  "id_normalization_mapping": {}
}
@@ -0,0 +1,547 @@
{
  "test_id": "tests/integration/responses/test_basic_responses.py::test_include_logprobs_non_streaming[txt=openai/gpt-4o]",
  "request": {
    "method": "POST",
    "url": "https://generativelanguage.googleapis.com/v1beta/openai/v1/models",
    "headers": {},
    "body": {},
    "endpoint": "/v1/models",
    "model": ""
  },
  "response": {
    "body": [
      "... the same catalog of openai.types.model.Model entries (Gemini, Gemma 3, Imagen 4, Veo, Lyria, and the Google embedding models, all owned_by google) as recorded above ..."
    ],
    "is_streaming": false
  },
  "id_normalization_mapping": {}
}
```diff
@@ -90,14 +90,12 @@ class TestVectorStoresValidation:
 
     async def test_validate_rewrite_query_prompt_missing_placeholder(self):
         """Test validation fails when prompt template is missing {query} placeholder."""
-        config = VectorStoresConfig(
-            rewrite_query_params=RewriteQueryParams(
-                prompt="This prompt has no placeholder",
-            ),
-        )
+        from pydantic import ValidationError
 
-        with pytest.raises(ValueError, match="'\\{query\\}' placeholder is required"):
-            await validate_vector_stores_config(config, {})
+        with pytest.raises(ValidationError, match=r"prompt must contain \{query\} placeholder"):
+            RewriteQueryParams(
+                prompt="This prompt has no placeholder",
+            )
 
 
 class TestSafetyConfigValidation:
```
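The updated test above expects the `{query}` placeholder check to happen when `RewriteQueryParams` is constructed, rather than in a separate async config validator. A minimal sketch of such a construction-time check, assuming a pydantic v2 field validator (the actual definition in `llama_stack.core.datatypes` may differ):

```python
# Hypothetical sketch only -- not the actual RewriteQueryParams definition.
from pydantic import BaseModel, field_validator


class RewriteQueryParamsSketch(BaseModel):
    prompt: str = "Rewrite this query for retrieval: {query}"  # assumed default

    @field_validator("prompt")
    @classmethod
    def _require_query_placeholder(cls, value: str) -> str:
        # Reject templates that cannot receive the user's query.
        if "{query}" not in value:
            raise ValueError("prompt must contain {query} placeholder")
        return value
```

With a validator along these lines, `RewriteQueryParamsSketch(prompt="This prompt has no placeholder")` raises a `ValidationError` at construction, which is the behavior the rewritten test asserts.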
tests/unit/core/test_vector_stores_config.py (new file, 170 lines)
@@ -0,0 +1,170 @@
```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import pytest
from pydantic import ValidationError

from llama_stack.core.datatypes import QualifiedModel, RewriteQueryParams, VectorStoresConfig


class TestVectorStoresConfigValidation:
    """Test validation of VectorStoresConfig prompt templates."""

    def test_default_config_is_valid(self):
        """Test that default configuration passes all validation."""
        config = VectorStoresConfig()

        # Verify all sub-configs exist with valid templates
        assert config.file_search_params.header_template is not None
        assert config.context_prompt_params.chunk_annotation_template is not None
        assert config.annotation_prompt_params.chunk_annotation_template is not None

        # Verify required placeholders are present
        assert "{num_chunks}" in config.file_search_params.header_template
        assert "knowledge_search" in config.file_search_params.header_template.lower()
        assert "{chunk.content}" in config.context_prompt_params.chunk_annotation_template
        assert "{query}" in config.context_prompt_params.context_template

    def test_template_validation_errors(self):
        """Test that templates fail validation for common errors."""
        from llama_stack.core.datatypes import AnnotationPromptParams, ContextPromptParams, FileSearchParams

        # Empty templates fail
        with pytest.raises(ValidationError, match="must not be empty"):
            FileSearchParams(header_template="")

        # Missing required placeholders fail
        with pytest.raises(ValidationError, match="must contain {num_chunks} placeholder"):
            FileSearchParams(header_template="search found results")

        with pytest.raises(ValidationError, match="must contain 'knowledge_search' keyword"):
            FileSearchParams(header_template="search found {num_chunks} results")

        with pytest.raises(ValidationError, match="must contain {chunk.content} placeholder"):
            ContextPromptParams(chunk_annotation_template="Result {index}: some content")

        with pytest.raises(ValidationError, match="must contain {query} placeholder"):
            ContextPromptParams(context_template="Retrieved results. Use as context.")

        with pytest.raises(ValidationError, match="must contain {file_id} placeholder"):
            AnnotationPromptParams(chunk_annotation_template="[{index}] {chunk_text}")

    def test_rewrite_query_params_validation(self):
        """Test RewriteQueryParams validation."""
        model = QualifiedModel(provider_id="test", model_id="test-model")

        # Valid config works
        valid_params = RewriteQueryParams(
            model=model, prompt="Expand this query: {query}", max_tokens=100, temperature=0.5
        )
        assert valid_params.prompt == "Expand this query: {query}"

        # Invalid configurations fail
        with pytest.raises(ValidationError, match="prompt must contain \\{query\\} placeholder"):
            RewriteQueryParams(model=model, prompt="No placeholder here")

        with pytest.raises(ValidationError, match="max_tokens must be positive"):
            RewriteQueryParams(model=model, max_tokens=0)

        with pytest.raises(ValidationError, match="temperature must be between 0.0 and 2.0"):
            RewriteQueryParams(model=model, temperature=3.0)

    def test_custom_configuration(self):
        """Test complete custom configuration."""
        from llama_stack.core.datatypes import AnnotationPromptParams, ContextPromptParams, FileSearchParams

        config = VectorStoresConfig(
            default_provider_id="test-provider",
            default_embedding_model=QualifiedModel(provider_id="test", model_id="embedding-model"),
            file_search_params=FileSearchParams(
                header_template="Custom knowledge_search found {num_chunks} items:\nSTART\n", footer_template="END\n"
            ),
            context_prompt_params=ContextPromptParams(
                chunk_annotation_template="Item {index}: {chunk.content} | Meta: {metadata}\n",
                context_template='Results for "{query}": Use carefully.\n',
            ),
            annotation_prompt_params=AnnotationPromptParams(
                enable_annotations=False,
                annotation_instruction_template=" Custom citation format.",
                chunk_citation_template="[{index}] {metadata_text} --> {file_id}\n{chunk_text}\n",
            ),
        )

        assert config.default_provider_id == "test-provider"
        assert "Custom knowledge_search" in config.file_search_params.header_template
        assert config.annotation_prompt_params.enable_annotations is False


class TestOptionalArchitecture:
    """Test optional sub-config architecture and constants fallback."""

    def test_guaranteed_defaults_behavior(self):
        """Test that sub-configs are always instantiated with defaults."""
        # Sub-configs are always instantiated due to default_factory
        config = VectorStoresConfig()
        assert config.file_search_params is not None
        assert config.context_prompt_params is not None
        assert config.annotation_prompt_params is not None
        assert "{num_chunks}" in config.file_search_params.header_template

    def test_guaranteed_defaults_match_constants(self):
        """Test that guaranteed defaults match expected constant values."""
        from llama_stack.providers.utils.memory.constants import (
            DEFAULT_CONTEXT_TEMPLATE,
            DEFAULT_FILE_SEARCH_HEADER_TEMPLATE,
        )

        # Create config with guaranteed defaults
        config = VectorStoresConfig()

        # Verify defaults match constants
        header_template = config.file_search_params.header_template
        context_template = config.context_prompt_params.context_template

        assert header_template == DEFAULT_FILE_SEARCH_HEADER_TEMPLATE
        assert context_template == DEFAULT_CONTEXT_TEMPLATE

        # Verify templates can be formatted successfully
        formatted_header = header_template.format(num_chunks=3)
        assert "3" in formatted_header
        assert "knowledge_search" in formatted_header.lower()

        formatted_context = context_template.format(
            query="test query", annotation_instruction=" Cite sources properly."
        )
        assert "test query" in formatted_context

    def test_end_to_end_template_usage(self):
        """Test that guaranteed defaults lead to working template output."""
        # Create config with guaranteed defaults
        config = VectorStoresConfig()

        header_template = config.file_search_params.header_template
        chunk_template = config.context_prompt_params.chunk_annotation_template

        # Generate realistic output
        test_chunks = [
            {"content": "Paris is the capital of France.", "metadata": {"doc": "geo.pdf"}},
            {"content": "London is the capital of England.", "metadata": {"doc": "cities.txt"}},
        ]

        header_output = header_template.format(num_chunks=len(test_chunks))
        chunk_outputs = []
        for i, chunk_data in enumerate(test_chunks):

            class MockChunk:
                content = chunk_data["content"]

            chunk_output = chunk_template.format(index=i + 1, chunk=MockChunk(), metadata=chunk_data["metadata"])
            chunk_outputs.append(chunk_output)

        complete_output = header_output + "".join(chunk_outputs)

        # Verify output is substantial and contains expected content
        assert len(complete_output) > 100
        assert "knowledge_search" in complete_output.lower()
        assert "Paris is the capital" in complete_output
        assert "London is the capital" in complete_output
```
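As an illustration of how the templates exercised above could be combined at query time, here is a sketch that assembles a header, per-chunk annotations, a footer, and the context instruction from a default `VectorStoresConfig`; the provider's real assembly logic may order or escape things differently:

```python
# Illustrative sketch; uses only the default VectorStoresConfig templates.
from types import SimpleNamespace

from llama_stack.core.datatypes import VectorStoresConfig

config = VectorStoresConfig()
chunks = [
    SimpleNamespace(content="Paris is the capital of France."),
    SimpleNamespace(content="London is the capital of England."),
]

parts = [config.file_search_params.header_template.format(num_chunks=len(chunks))]
for index, chunk in enumerate(chunks, start=1):
    # chunk_annotation_template references {chunk.content}, so any object with a
    # `content` attribute works here.
    parts.append(
        config.context_prompt_params.chunk_annotation_template.format(
            index=index, chunk=chunk, metadata={}
        )
    )
parts.append(config.file_search_params.footer_template)
parts.append(
    config.context_prompt_params.context_template.format(
        query="capital cities", annotation_instruction=""
    )
)
print("".join(parts))
```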
```diff
@@ -8,21 +8,26 @@ from unittest.mock import AsyncMock, MagicMock
 
 import pytest
 
+from llama_stack.providers.inline.tool_runtime.rag.config import RagToolRuntimeConfig
 from llama_stack.providers.inline.tool_runtime.rag.memory import MemoryToolRuntimeImpl
 from llama_stack_api import Chunk, ChunkMetadata, QueryChunksResponse, RAGQueryConfig
 
 
 class TestRagQuery:
     async def test_query_raises_on_empty_vector_store_ids(self):
+        config = RagToolRuntimeConfig()
         rag_tool = MemoryToolRuntimeImpl(
-            config=MagicMock(), vector_io_api=MagicMock(), inference_api=MagicMock(), files_api=MagicMock()
+            config=config, vector_io_api=MagicMock(), inference_api=MagicMock(), files_api=MagicMock()
         )
         with pytest.raises(ValueError):
             await rag_tool.query(content=MagicMock(), vector_store_ids=[])
 
     async def test_query_chunk_metadata_handling(self):
+        # Create config with default templates
+        config = RagToolRuntimeConfig()
+
         rag_tool = MemoryToolRuntimeImpl(
-            config=MagicMock(), vector_io_api=MagicMock(), inference_api=MagicMock(), files_api=MagicMock()
+            config=config, vector_io_api=MagicMock(), inference_api=MagicMock(), files_api=MagicMock()
         )
         content = "test query content"
         vector_store_ids = ["db1"]
```
```diff
@@ -33,9 +38,8 @@ class TestRagQuery:
             source="test_source",
             metadata_token_count=5,
         )
-        interleaved_content = MagicMock()
         chunk = Chunk(
-            content=interleaved_content,
+            content="This is test chunk content from document 1",
             chunk_id="chunk1",
             metadata={
                 "key1": "value1",
```
```diff
@@ -78,8 +82,11 @@ class TestRagQuery:
             RAGQueryConfig(mode="wrong_mode")
 
     async def test_query_adds_vector_store_id_to_chunk_metadata(self):
+        # Create config with default templates
+        config = RagToolRuntimeConfig()
+
         rag_tool = MemoryToolRuntimeImpl(
-            config=MagicMock(),
+            config=config,
             vector_io_api=MagicMock(),
             inference_api=MagicMock(),
             files_api=MagicMock(),
```
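These tests now pass a real `RagToolRuntimeConfig` instead of a `MagicMock`, so the runtime reads its prompt templates from configuration during queries. A minimal sketch of reading them back, assuming the `vector_stores_config` field layout described in the rag-runtime provider configuration:

```python
# Sketch only; attribute names assumed from the rag-runtime provider configuration.
from llama_stack.providers.inline.tool_runtime.rag.config import RagToolRuntimeConfig

config = RagToolRuntimeConfig()
file_search = config.vector_stores_config.file_search_params
print(file_search.header_template.format(num_chunks=2))
print(file_search.footer_template)
```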