Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-12 20:12:33 +00:00)
a lot of simplification finally. all works
This commit is contained in: parent b47bf340db, commit 00fd27be1f
39 changed files with 16027 additions and 1969 deletions
@@ -232,14 +232,25 @@ def create_dynamic_typed_route(func: Any, method: str, route: str) -> Callable:
        await log_request_pre_validation(request)

        test_context_token = None

        # Use context manager with both provider data and auth attributes
        with request_provider_data_context(request.headers, user):
            if os.environ.get("LLAMA_STACK_TEST_INFERENCE_MODE"):
                from llama_stack.core.testing_context import (
                    TEST_CONTEXT,
                    reset_test_context,
                    sync_test_context_from_provider_data,
                )

                test_context_token = sync_test_context_from_provider_data()

            is_streaming = is_streaming_request(func.__name__, request, **kwargs)

            try:
                if is_streaming:
                    gen = preserve_contexts_async_generator(
                        sse_generator(func(**kwargs)), [CURRENT_TRACE_CONTEXT, PROVIDER_DATA_VAR]
                        sse_generator(func(**kwargs)), [CURRENT_TRACE_CONTEXT, PROVIDER_DATA_VAR, TEST_CONTEXT]
                    )
                    return StreamingResponse(gen, media_type="text/event-stream")
                else:
@@ -258,6 +269,9 @@ def create_dynamic_typed_route(func: Any, method: str, route: str) -> Callable:
                else:
                    logger.error(f"Error executing endpoint {route=} {method=}: {str(e)}")
                    raise translate_exception(e) from e
            finally:
                if test_context_token is not None:
                    reset_test_context(test_context_token)

    sig = inspect.signature(func)
44  llama_stack/core/testing_context.py  Normal file

@@ -0,0 +1,44 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

import os
from contextvars import ContextVar

from llama_stack.core.request_headers import PROVIDER_DATA_VAR

TEST_CONTEXT: ContextVar[str | None] = ContextVar("llama_stack_test_context", default=None)


def get_test_context() -> str | None:
    return TEST_CONTEXT.get()


def set_test_context(value: str | None):
    return TEST_CONTEXT.set(value)


def reset_test_context(token) -> None:
    TEST_CONTEXT.reset(token)


def sync_test_context_from_provider_data():
    """Sync test context from provider data when running in server test mode."""
    if "LLAMA_STACK_TEST_INFERENCE_MODE" not in os.environ:
        return None

    stack_config_type = os.environ.get("LLAMA_STACK_TEST_STACK_CONFIG_TYPE", "library_client")
    if stack_config_type != "server":
        return None

    try:
        provider_data = PROVIDER_DATA_VAR.get()
    except LookupError:
        provider_data = None

    if provider_data and "__test_id" in provider_data:
        return TEST_CONTEXT.set(provider_data["__test_id"])

    return None
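For illustration only, not part of this diff: a minimal sketch of the token-based set/reset pattern the module above relies on, using only the standard-library contextvars API. The variable name and test id below are made-up stand-ins for TEST_CONTEXT and a pytest nodeid.

from contextvars import ContextVar

ctx: ContextVar[str | None] = ContextVar("ctx", default=None)

token = ctx.set("tests/integration/foo.py::test_bar")
assert ctx.get() == "tests/integration/foo.py::test_bar"

# reset(token) restores whatever value was active before set(), which is why
# sync_test_context_from_provider_data() returns the token and the caller
# resets it in a finally block.
ctx.reset(token)
assert ctx.get() is None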
@@ -9,7 +9,6 @@ from __future__ import annotations # for forward references
import hashlib
import json
import os
import re
from collections.abc import Callable, Generator
from contextlib import contextmanager
from enum import StrEnum
@@ -35,12 +34,10 @@ _original_methods: dict[str, Any] = {}
_id_counters: dict[str, dict[str, int]] = {}

# Test context uses ContextVar since it changes per-test and needs async isolation
from contextvars import ContextVar

_test_context: ContextVar[str | None] = ContextVar("_test_context", default=None)

from openai.types.completion_choice import CompletionChoice

from llama_stack.core.testing_context import get_test_context

# update the "finish_reason" field, since its type definition is wrong (no None is accepted)
CompletionChoice.model_fields["finish_reason"].annotation = Literal["stop", "length", "content_filter"] | None
CompletionChoice.model_rebuild()
@@ -60,6 +57,7 @@ _ID_KIND_PREFIXES: dict[str, str] = {
    "file": "file-",
    "vector_store": "vs_",
    "vector_store_file_batch": "batch_",
    "tool_call": "call_",
}
@@ -68,176 +66,32 @@ def _allocate_test_scoped_id(kind: str) -> str | None:

    global _id_counters

    test_id = _test_context.get()
    test_id = get_test_context()
    prefix = _ID_KIND_PREFIXES.get(kind)

    if prefix is None:
        return None

    key = test_id or "__global__"
    if not test_id:
        raise ValueError(f"Test ID is required for {kind} ID allocation")

    key = test_id
    if key not in _id_counters:
        _id_counters[key] = {}

    counter = _id_counters[key].get(kind, 0) + 1
    # each test should get a contiguous block of IDs otherwise we will get
    # collisions between tests inside other systems (like file storage) which
    # expect IDs to be unique
    test_hash = hashlib.sha256(test_id.encode()).hexdigest()
    test_hash_int = int(test_hash, 16)
    counter = test_hash_int % 1000000000000

    counter = _id_counters[key].get(kind, counter) + 1
    _id_counters[key][kind] = counter

    return f"{prefix}{counter}"


class _IdCanonicalizer:
    PATTERN = re.compile(r"(file-[A-Za-z0-9_-]+|vs_[A-Za-z0-9_-]+|batch_[A-Za-z0-9_-]+)")

    def __init__(self) -> None:
        self._mappings: dict[str, dict[str, str]] = {kind: {} for kind in _ID_KIND_PREFIXES}
        self._counters: dict[str, int] = dict.fromkeys(_ID_KIND_PREFIXES, 0)

    def canonicalize(self, obj: Any) -> Any:
        if isinstance(obj, dict):
            return {k: self._canonicalize_value(k, v) for k, v in obj.items()}
        if isinstance(obj, list):
            return [self.canonicalize(item) for item in obj]
        if isinstance(obj, str):
            return self._canonicalize_string(obj)
        return obj

    def _canonicalize_value(self, key: str, value: Any) -> Any:
        if key in {"vector_db_id", "vector_store_id", "bank_id"} and isinstance(value, str):
            return self._canonicalize_string(value)
        if key == "document_id" and isinstance(value, str) and value.startswith("file-"):
            return self._canonicalize_string(value)
        return self.canonicalize(value)

    def _canonicalize_string(self, value: str) -> str:
        def replace(match: re.Match[str]) -> str:
            token = match.group(0)
            if token.startswith("file-"):
                return self._mapped_value("file", token)
            if token.startswith("vs_"):
                return self._mapped_value("vector_store", token)
            if token.startswith("batch_"):
                return self._mapped_value("vector_store_file_batch", token)
            return token

        return self.PATTERN.sub(replace, value)

    def _mapped_value(self, kind: str, original: str) -> str:
        mapping = self._mappings[kind]
        if original not in mapping:
            self._counters[kind] += 1
            mapping[original] = f"{_ID_KIND_PREFIXES[kind]}{self._counters[kind]}"
        return mapping[original]


def _canonicalize_for_hashing(obj: Any) -> Any:
    canonicalizer = _IdCanonicalizer()
    return canonicalizer.canonicalize(obj)


def _chunk_text_content(chunk: Any) -> tuple[str | None, bool]:
    """Return (content, has_structured_fields) for OpenAI chat completion chunks."""
    choices = getattr(chunk, "choices", None)
    if not choices:
        return None, False

    delta = choices[0].delta
    content = getattr(delta, "content", None)
    if not content:
        return None, False

    has_structured = bool(getattr(delta, "tool_calls", None) or getattr(delta, "function_call", None))
    return content, has_structured


def _chunk_with_content(chunk: Any, content: str) -> Any:
    """Return a copy of the chunk with delta.content replaced by the provided string."""
    choices = getattr(chunk, "choices", None)
    if not choices:
        return chunk

    updated_choices = []
    for choice in choices:
        delta = choice.delta
        if getattr(delta, "content", None) is not None:
            new_delta = delta.model_copy(update={"content": content})
            updated_choices.append(choice.model_copy(update={"delta": new_delta}))
        else:
            updated_choices.append(choice)

    return chunk.model_copy(update={"choices": updated_choices})


def _ends_with_partial_identifier(text: str) -> bool:
    """Return True if text ends in an incomplete file identifier."""
    match = re.search(r"(?:<\|)?file-[A-Za-z0-9_-]*$", text)
    if not match:
        return False

    token = match.group()
    enclosed = token.startswith("<|")
    if enclosed and not token.endswith("|>"):
        return True

    if enclosed:
        core = token[2:-2] if token.endswith("|>") else token[2:]
    else:
        core = token

    suffix = core[len("file-") :]
    if len(suffix) < 16:
        return True
    if not re.fullmatch(r"[A-Za-z0-9_-]+", suffix):
        return True

    return False


def _has_safe_boundary(text: str) -> bool:
    if not text:
        return False

    last_char = text[-1]
    if last_char.isspace():
        return True

    return last_char in ".,?!;:)]}>\"'"


def _coalesce_streaming_chunks(chunks: list[Any]) -> list[Any]:
    """Merge adjacent text chunks to avoid breaking identifiers across boundaries."""
    result: list[Any] = []
    pending_chunk: Any | None = None
    pending_content = ""

    for chunk in chunks:
        content, has_structured = _chunk_text_content(chunk)

        if content is None or has_structured:
            if pending_chunk is not None:
                result.append(_chunk_with_content(pending_chunk, pending_content))
                pending_chunk = None
                pending_content = ""

            result.append(chunk)
            continue

        if pending_chunk is None:
            pending_chunk = chunk
            pending_content = content
        else:
            pending_content += content

        if (not _ends_with_partial_identifier(pending_content)) and _has_safe_boundary(pending_content):
            result.append(_chunk_with_content(pending_chunk, pending_content))
            pending_chunk = None
            pending_content = ""

    if pending_chunk is not None:
        result.append(_chunk_with_content(pending_chunk, pending_content))

    return result


def _deterministic_id_override(kind: str, factory: Callable[[], str]) -> str:
    deterministic_id = _allocate_test_scoped_id(kind)
    if deterministic_id is not None:
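For illustration only, not part of this diff: a self-contained sketch of the hash-seeded counter scheme added to _allocate_test_scoped_id above, assuming the same SHA-256-modulo seeding. The helper name and test ids are made up.

import hashlib

def counter_base(test_id: str) -> int:
    # Seed each test's counter block from a hash of its nodeid so that IDs
    # allocated by different tests start from well-separated bases, making
    # cross-test collisions very unlikely.
    digest = hashlib.sha256(test_id.encode()).hexdigest()
    return int(digest, 16) % 1000000000000

print(f"file-{counter_base('tests/a.py::test_one') + 1}")  # first file ID allocated in test_one
print(f"file-{counter_base('tests/b.py::test_two') + 1}")  # first file ID allocated in test_two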
@@ -262,12 +116,12 @@ def normalize_inference_request(method: str, url: str, headers: dict[str, Any],
    normalized: dict[str, Any] = {
        "method": method.upper(),
        "endpoint": parsed.path,
        "body": _canonicalize_for_hashing(body),
        "body": body,
    }

    # Include test_id for isolation, except for shared infrastructure endpoints
    if parsed.path not in ("/api/tags", "/v1/models"):
        normalized["test_id"] = _test_context.get()
        normalized["test_id"] = get_test_context()

    # Create hash - sort_keys=True ensures deterministic ordering
    normalized_json = json.dumps(normalized, sort_keys=True)
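For illustration only, not part of this diff: a minimal sketch of the hashing step above, mirroring the json.dumps(sort_keys=True) plus SHA-256 approach; the sample payload is invented.

import hashlib
import json

normalized = {
    "method": "POST",
    "endpoint": "/v1/chat/completions",
    "body": {"model": "gpt-4o", "stream": True},
    "test_id": "tests/integration/inference/test_basic.py::test_foo",
}

# sort_keys=True makes the serialization, and therefore the hash, independent
# of dict insertion order.
request_hash = hashlib.sha256(json.dumps(normalized, sort_keys=True).encode()).hexdigest()
print(request_hash)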
@@ -279,7 +133,7 @@ def normalize_tool_request(provider_name: str, tool_name: str, kwargs: dict[str,
    normalized = {
        "provider": provider_name,
        "tool_name": tool_name,
        "kwargs": _canonicalize_for_hashing(kwargs),
        "kwargs": kwargs,
    }

    # Create hash - sort_keys=True ensures deterministic ordering
@@ -287,33 +141,6 @@ def normalize_tool_request(provider_name: str, tool_name: str, kwargs: dict[str,
    return hashlib.sha256(normalized_json.encode()).hexdigest()


def _sync_test_context_from_provider_data():
    """In server mode, sync test ID from provider_data to _test_context.

    This ensures that storage operations (which read from _test_context) work correctly
    in server mode where the test ID arrives via HTTP header → provider_data.

    Returns a token to reset _test_context, or None if no sync was needed.
    """
    stack_config_type = os.environ.get("LLAMA_STACK_TEST_STACK_CONFIG_TYPE", "library_client")

    if stack_config_type != "server":
        return None

    try:
        from llama_stack.core.request_headers import PROVIDER_DATA_VAR

        provider_data = PROVIDER_DATA_VAR.get()

        if provider_data and "__test_id" in provider_data:
            test_id = provider_data["__test_id"]
            return _test_context.set(test_id)
    except ImportError:
        pass

    return None


def patch_httpx_for_test_id():
    """Patch client _prepare_request methods to inject test ID into provider data header.
@@ -335,13 +162,12 @@ def patch_httpx_for_test_id():
    def patched_prepare_request(self, request):
        # Call original first (it's a sync method that returns None)
        # Determine which original to call based on client type
        if "llama_stack_client" in self.__class__.__module__:
            _original_methods["llama_stack_client_prepare_request"](self, request)
            _original_methods["openai_prepare_request"](self, request)
        _original_methods["llama_stack_client_prepare_request"](self, request)
        _original_methods["openai_prepare_request"](self, request)

        # Only inject test ID in server mode
        stack_config_type = os.environ.get("LLAMA_STACK_TEST_STACK_CONFIG_TYPE", "library_client")
        test_id = _test_context.get()
        test_id = get_test_context()

        if stack_config_type == "server" and test_id:
            provider_data_header = request.headers.get("X-LlamaStack-Provider-Data")
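For illustration only, not part of this diff: a rough sketch of the header-injection idea, assuming the test id is merged into the JSON X-LlamaStack-Provider-Data header under the "__test_id" key as the surrounding code suggests; the helper name and sample values are hypothetical.

import json

def inject_test_id(headers: dict[str, str], test_id: str) -> None:
    # Merge __test_id into any existing provider-data header rather than
    # overwriting other provider data carried by the request.
    existing = headers.get("X-LlamaStack-Provider-Data")
    provider_data = json.loads(existing) if existing else {}
    provider_data["__test_id"] = test_id
    headers["X-LlamaStack-Provider-Data"] = json.dumps(provider_data)

headers: dict[str, str] = {}
inject_test_id(headers, "tests/integration/inference/test_basic.py::test_foo[params]")
print(headers["X-LlamaStack-Provider-Data"])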
@@ -482,8 +308,6 @@ class ResponseStorage:
    def __init__(self, base_dir: Path):
        self.base_dir = base_dir
        # Don't create responses_dir here - determine it per-test at runtime
        self._legacy_index: dict[str, Path] = {}
        self._scanned_dirs: set[Path] = set()

    def _get_test_dir(self) -> Path:
        """Get the recordings directory in the test file's parent directory.
@@ -491,7 +315,7 @@ class ResponseStorage:
        For test at "tests/integration/inference/test_foo.py::test_bar",
        returns "tests/integration/inference/recordings/".
        """
        test_id = _test_context.get()
        test_id = get_test_context()
        if test_id:
            # Extract the directory path from the test nodeid
            # e.g., "tests/integration/inference/test_basic.py::test_foo[params]"
@@ -506,7 +330,7 @@ class ResponseStorage:
        # Fallback for non-test contexts
        return self.base_dir / "recordings"

    def _ensure_directories(self):
    def _ensure_directory(self):
        """Ensure test-specific directories exist."""
        test_dir = self._get_test_dir()
        test_dir.mkdir(parents=True, exist_ok=True)
@@ -514,7 +338,7 @@ class ResponseStorage:

    def store_recording(self, request_hash: str, request: dict[str, Any], response: dict[str, Any]):
        """Store a request/response pair."""
        responses_dir = self._ensure_directories()
        responses_dir = self._ensure_directory()

        # Use FULL hash (not truncated)
        response_file = f"{request_hash}.json"
@@ -543,7 +367,7 @@ class ResponseStorage:
        with open(response_path, "w") as f:
            json.dump(
                {
                    "test_id": _test_context.get(),
                    "test_id": get_test_context(),
                    "request": request,
                    "response": serialized_response,
                    "id_normalization_mapping": {},
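For illustration only, not part of this diff: a sketch of reading one stored recording, assuming the on-disk layout implied above (a <request_hash>.json file under the test module's recordings/ directory containing test_id, request, response, and id_normalization_mapping). The path below is hypothetical.

import json
from pathlib import Path

path = Path("tests/integration/inference/recordings") / ("0" * 64 + ".json")
with open(path) as f:
    data = json.load(f)

print(data["test_id"])                   # pytest nodeid, or null for shared recordings
print(data["request"]["endpoint"])       # e.g. /v1/chat/completions
print(data["response"]["is_streaming"])  # body is a list of chunks when true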
@@ -554,8 +378,6 @@ class ResponseStorage:
                f.write("\n")
                f.flush()

        self._legacy_index[request_hash] = response_path

    def find_recording(self, request_hash: str) -> dict[str, Any] | None:
        """Find a recorded response by request hash.
@@ -579,52 +401,6 @@ class ResponseStorage:
        if fallback_path.exists():
            return _recording_from_file(fallback_path)

        return self._find_in_legacy_index(request_hash, [test_dir, fallback_dir])

    def _find_in_legacy_index(self, request_hash: str, directories: list[Path]) -> dict[str, Any] | None:
        for directory in directories:
            if not directory.exists() or directory in self._scanned_dirs:
                continue

            for path in directory.glob("*.json"):
                try:
                    with open(path) as f:
                        data = json.load(f)
                except Exception:
                    continue

                request = data.get("request")
                if not request:
                    continue

                body = request.get("body")
                canonical_body = _canonicalize_for_hashing(body) if isinstance(body, dict | list) else body

                token = None
                test_id = data.get("test_id")
                if test_id:
                    token = _test_context.set(test_id)

                try:
                    legacy_hash = normalize_inference_request(
                        request.get("method", ""),
                        request.get("url", ""),
                        request.get("headers", {}),
                        canonical_body,
                    )
                finally:
                    if token is not None:
                        _test_context.reset(token)

                if legacy_hash not in self._legacy_index:
                    self._legacy_index[legacy_hash] = path

            self._scanned_dirs.add(directory)

        legacy_path = self._legacy_index.get(request_hash)
        if legacy_path and legacy_path.exists():
            return _recording_from_file(legacy_path)

        return None

    def _model_list_responses(self, request_hash: str) -> list[dict[str, Any]]:
@@ -740,46 +516,38 @@ async def _patched_tool_invoke_method(
        # Normal operation
        return await original_method(self, tool_name, kwargs)

    # In server mode, sync test ID from provider_data to _test_context for storage operations
    test_context_token = _sync_test_context_from_provider_data()
    request_hash = normalize_tool_request(provider_name, tool_name, kwargs)

    try:
        request_hash = normalize_tool_request(provider_name, tool_name, kwargs)

    if _current_mode in (APIRecordingMode.REPLAY, APIRecordingMode.RECORD_IF_MISSING):
        recording = _current_storage.find_recording(request_hash)
        if recording:
            return recording["response"]["body"]
        elif _current_mode == APIRecordingMode.REPLAY:
            raise RuntimeError(
                f"No recorded tool result found for {provider_name}.{tool_name}\n"
                f"Request: {kwargs}\n"
                f"To record this response, run with LLAMA_STACK_TEST_INFERENCE_MODE=record"
            )
        # If RECORD_IF_MISSING and no recording found, fall through to record

    if _current_mode in (APIRecordingMode.REPLAY, APIRecordingMode.RECORD_IF_MISSING):
        recording = _current_storage.find_recording(request_hash)
        if recording:
            return recording["response"]["body"]
        elif _current_mode == APIRecordingMode.REPLAY:
            raise RuntimeError(
                f"No recorded tool result found for {provider_name}.{tool_name}\n"
                f"Request: {kwargs}\n"
                f"To record this response, run with LLAMA_STACK_TEST_INFERENCE_MODE=record"
            )
        # If RECORD_IF_MISSING and no recording found, fall through to record

    if _current_mode in (APIRecordingMode.RECORD, APIRecordingMode.RECORD_IF_MISSING):
        # Make the tool call and record it
        result = await original_method(self, tool_name, kwargs)

    if _current_mode in (APIRecordingMode.RECORD, APIRecordingMode.RECORD_IF_MISSING):
        # Make the tool call and record it
        result = await original_method(self, tool_name, kwargs)

        request_data = {
            "test_id": get_test_context(),
            "provider": provider_name,
            "tool_name": tool_name,
            "kwargs": kwargs,
        }
        response_data = {"body": result, "is_streaming": False}

        request_data = {
            "test_id": _test_context.get(),
            "provider": provider_name,
            "tool_name": tool_name,
            "kwargs": kwargs,
        }
        response_data = {"body": result, "is_streaming": False}

        # Store the recording
        _current_storage.store_recording(request_hash, request_data, response_data)
        return result

        # Store the recording
        _current_storage.store_recording(request_hash, request_data, response_data)
        return result

    else:
        raise AssertionError(f"Invalid mode: {_current_mode}")
    finally:
        # Reset test context if we set it in server mode
        if test_context_token is not None:
            _test_context.reset(test_context_token)
    else:
        raise AssertionError(f"Invalid mode: {_current_mode}")


async def _patched_inference_method(original_method, self, client_type, endpoint, *args, **kwargs):
@@ -794,120 +562,108 @@ async def _patched_inference_method(original_method, self, client_type, endpoint
|
|||
else:
|
||||
return await original_method(self, *args, **kwargs)
|
||||
|
||||
# In server mode, sync test ID from provider_data to _test_context for storage operations
|
||||
test_context_token = _sync_test_context_from_provider_data()
|
||||
# Get base URL based on client type
|
||||
if client_type == "openai":
|
||||
base_url = str(self._client.base_url)
|
||||
|
||||
try:
|
||||
# Get base URL based on client type
|
||||
if client_type == "openai":
|
||||
base_url = str(self._client.base_url)
|
||||
# the OpenAI client methods may pass NOT_GIVEN for unset parameters; filter these out
|
||||
kwargs = {k: v for k, v in kwargs.items() if v is not NOT_GIVEN}
|
||||
elif client_type == "ollama":
|
||||
# Get base URL from the client (Ollama client uses host attribute)
|
||||
base_url = getattr(self, "host", "http://localhost:11434")
|
||||
if not base_url.startswith("http"):
|
||||
base_url = f"http://{base_url}"
|
||||
else:
|
||||
raise ValueError(f"Unknown client type: {client_type}")
|
||||
|
||||
# the OpenAI client methods may pass NOT_GIVEN for unset parameters; filter these out
|
||||
kwargs = {k: v for k, v in kwargs.items() if v is not NOT_GIVEN}
|
||||
elif client_type == "ollama":
|
||||
# Get base URL from the client (Ollama client uses host attribute)
|
||||
base_url = getattr(self, "host", "http://localhost:11434")
|
||||
if not base_url.startswith("http"):
|
||||
base_url = f"http://{base_url}"
|
||||
url = base_url.rstrip("/") + endpoint
|
||||
# Special handling for Databricks URLs to avoid leaking workspace info
|
||||
# e.g. https://adb-1234567890123456.7.cloud.databricks.com -> https://...cloud.databricks.com
|
||||
if "cloud.databricks.com" in url:
|
||||
url = "__databricks__" + url.split("cloud.databricks.com")[-1]
|
||||
method = "POST"
|
||||
headers = {}
|
||||
body = kwargs
|
||||
|
||||
request_hash = normalize_inference_request(method, url, headers, body)
|
||||
|
||||
# Try to find existing recording for REPLAY or RECORD_IF_MISSING modes
|
||||
recording = None
|
||||
if mode == APIRecordingMode.REPLAY or mode == APIRecordingMode.RECORD_IF_MISSING:
|
||||
# Special handling for model-list endpoints: merge all recordings with this hash
|
||||
if endpoint in ("/api/tags", "/v1/models"):
|
||||
records = storage._model_list_responses(request_hash)
|
||||
recording = _combine_model_list_responses(endpoint, records)
|
||||
else:
|
||||
raise ValueError(f"Unknown client type: {client_type}")
|
||||
recording = storage.find_recording(request_hash)
|
||||
|
||||
url = base_url.rstrip("/") + endpoint
|
||||
# Special handling for Databricks URLs to avoid leaking workspace info
|
||||
# e.g. https://adb-1234567890123456.7.cloud.databricks.com -> https://...cloud.databricks.com
|
||||
if "cloud.databricks.com" in url:
|
||||
url = "__databricks__" + url.split("cloud.databricks.com")[-1]
|
||||
method = "POST"
|
||||
headers = {}
|
||||
body = kwargs
|
||||
if recording:
|
||||
response_body = recording["response"]["body"]
|
||||
|
||||
request_hash = normalize_inference_request(method, url, headers, body)
|
||||
if recording["response"].get("is_streaming", False):
|
||||
|
||||
# Try to find existing recording for REPLAY or RECORD_IF_MISSING modes
|
||||
recording = None
|
||||
if mode == APIRecordingMode.REPLAY or mode == APIRecordingMode.RECORD_IF_MISSING:
|
||||
# Special handling for model-list endpoints: merge all recordings with this hash
|
||||
if endpoint in ("/api/tags", "/v1/models"):
|
||||
records = storage._model_list_responses(request_hash)
|
||||
recording = _combine_model_list_responses(endpoint, records)
|
||||
else:
|
||||
recording = storage.find_recording(request_hash)
|
||||
|
||||
if recording:
|
||||
response_body = recording["response"]["body"]
|
||||
|
||||
if recording["response"].get("is_streaming", False) and isinstance(response_body, list):
|
||||
response_body = _coalesce_streaming_chunks(response_body)
|
||||
|
||||
if recording["response"].get("is_streaming", False):
|
||||
|
||||
async def replay_stream():
|
||||
for chunk in response_body:
|
||||
yield chunk
|
||||
|
||||
return replay_stream()
|
||||
else:
|
||||
return response_body
|
||||
elif mode == APIRecordingMode.REPLAY:
|
||||
# REPLAY mode requires recording to exist
|
||||
raise RuntimeError(
|
||||
f"No recorded response found for request hash: {request_hash}\n"
|
||||
f"Request: {method} {url} {body}\n"
|
||||
f"Model: {body.get('model', 'unknown')}\n"
|
||||
f"To record this response, run with LLAMA_STACK_TEST_INFERENCE_MODE=record"
|
||||
)
|
||||
|
||||
if mode == APIRecordingMode.RECORD or (mode == APIRecordingMode.RECORD_IF_MISSING and not recording):
|
||||
if endpoint == "/v1/models":
|
||||
response = original_method(self, *args, **kwargs)
|
||||
else:
|
||||
response = await original_method(self, *args, **kwargs)
|
||||
|
||||
# we want to store the result of the iterator, not the iterator itself
|
||||
if endpoint == "/v1/models":
|
||||
response = [m async for m in response]
|
||||
|
||||
request_data = {
|
||||
"method": method,
|
||||
"url": url,
|
||||
"headers": headers,
|
||||
"body": body,
|
||||
"endpoint": endpoint,
|
||||
"model": body.get("model", ""),
|
||||
}
|
||||
|
||||
# Determine if this is a streaming request based on request parameters
|
||||
is_streaming = body.get("stream", False)
|
||||
|
||||
if is_streaming:
|
||||
# For streaming responses, we need to collect all chunks immediately before yielding
|
||||
# This ensures the recording is saved even if the generator isn't fully consumed
|
||||
raw_chunks: list[Any] = []
|
||||
async for chunk in response:
|
||||
raw_chunks.append(chunk)
|
||||
|
||||
chunks = _coalesce_streaming_chunks(raw_chunks)
|
||||
|
||||
# Store the recording immediately
|
||||
response_data = {"body": chunks, "is_streaming": True}
|
||||
storage.store_recording(request_hash, request_data, response_data)
|
||||
|
||||
# Return a generator that replays the stored chunks
|
||||
async def replay_recorded_stream():
|
||||
for chunk in chunks:
|
||||
async def replay_stream():
|
||||
for chunk in response_body:
|
||||
yield chunk
|
||||
|
||||
return replay_recorded_stream()
|
||||
return replay_stream()
|
||||
else:
|
||||
response_data = {"body": response, "is_streaming": False}
|
||||
storage.store_recording(request_hash, request_data, response_data)
|
||||
return response
|
||||
return response_body
|
||||
elif mode == APIRecordingMode.REPLAY:
|
||||
# REPLAY mode requires recording to exist
|
||||
raise RuntimeError(
|
||||
f"No recorded response found for request hash: {request_hash}\n"
|
||||
f"Request: {method} {url} {body}\n"
|
||||
f"Model: {body.get('model', 'unknown')}\n"
|
||||
f"To record this response, run with LLAMA_STACK_TEST_INFERENCE_MODE=record"
|
||||
)
|
||||
|
||||
if mode == APIRecordingMode.RECORD or (mode == APIRecordingMode.RECORD_IF_MISSING and not recording):
|
||||
if endpoint == "/v1/models":
|
||||
response = original_method(self, *args, **kwargs)
|
||||
else:
|
||||
raise AssertionError(f"Invalid mode: {mode}")
|
||||
finally:
|
||||
if test_context_token:
|
||||
_test_context.reset(test_context_token)
|
||||
response = await original_method(self, *args, **kwargs)
|
||||
|
||||
# we want to store the result of the iterator, not the iterator itself
|
||||
if endpoint == "/v1/models":
|
||||
response = [m async for m in response]
|
||||
|
||||
request_data = {
|
||||
"method": method,
|
||||
"url": url,
|
||||
"headers": headers,
|
||||
"body": body,
|
||||
"endpoint": endpoint,
|
||||
"model": body.get("model", ""),
|
||||
}
|
||||
|
||||
# Determine if this is a streaming request based on request parameters
|
||||
is_streaming = body.get("stream", False)
|
||||
|
||||
if is_streaming:
|
||||
# For streaming responses, we need to collect all chunks immediately before yielding
|
||||
# This ensures the recording is saved even if the generator isn't fully consumed
|
||||
chunks: list[Any] = []
|
||||
async for chunk in response:
|
||||
chunks.append(chunk)
|
||||
|
||||
# Store the recording immediately
|
||||
response_data = {"body": chunks, "is_streaming": True}
|
||||
storage.store_recording(request_hash, request_data, response_data)
|
||||
|
||||
# Return a generator that replays the stored chunks
|
||||
async def replay_recorded_stream():
|
||||
for chunk in chunks:
|
||||
yield chunk
|
||||
|
||||
return replay_recorded_stream()
|
||||
else:
|
||||
response_data = {"body": response, "is_streaming": False}
|
||||
storage.store_recording(request_hash, request_data, response_data)
|
||||
return response
|
||||
|
||||
else:
|
||||
raise AssertionError(f"Invalid mode: {mode}")
|
||||
|
||||
|
||||
def patch_inference_clients():
|
||||
|
|
|
|||
|
|
@@ -0,0 +1,107 @@
|
|||
{
|
||||
"test_id": null,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Which planet has rings around it with a name starting with letter S?"
|
||||
}
|
||||
],
|
||||
"stream": true
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-224f7e7bd332",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f33640a400",
|
||||
"usage": null,
|
||||
"obfuscation": "QgpggKqlsUi"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-224f7e7bd332",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The planet with rings around it that starts with the letter S is Saturn.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f33640a400",
|
||||
"usage": null,
|
||||
"obfuscation": "HYFma3xA0U"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-224f7e7bd332",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f33640a400",
|
||||
"usage": null,
|
||||
"obfuscation": "GqfSkdB"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@@ -0,0 +1,107 @@
|
|||
{
|
||||
"test_id": null,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Which planet do humans live on?"
|
||||
}
|
||||
],
|
||||
"stream": true
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-a44164820534",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_1827dd0c55",
|
||||
"usage": null,
|
||||
"obfuscation": "Uk6yP9DR13H"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-a44164820534",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "Humans live on Earth.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_1827dd0c55",
|
||||
"usage": null,
|
||||
"obfuscation": "6RKlE6NFLf"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-a44164820534",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_1827dd0c55",
|
||||
"usage": null,
|
||||
"obfuscation": "IhlSKzm"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@@ -0,0 +1,124 @@
|
|||
{
|
||||
"test_id": null,
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "what teams are playing in this image?"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": [
|
||||
{
|
||||
"type": "image_url",
|
||||
"image_url": {
|
||||
"url": "https://upload.wikimedia.org/wikipedia/commons/3/3b/LeBron_James_Layup_%28Cleveland_vs_Brooklyn_2018%29.jpg",
|
||||
"detail": "auto"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-a6ad8748dce1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "XomWZpEB3cK"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-a6ad8748dce1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The teams playing in the image are the Cleveland Cavaliers and the Brooklyn Nets.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "w3SqawrKwS"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-a6ad8748dce1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "kkQyIuu"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@@ -849,5 +849,6 @@
      }
    ],
    "is_streaming": false
  }
  },
  "id_normalization_mapping": {}
}
@@ -59,15 +59,13 @@ def _track_test_context(request):
    This fixture runs for every test and stores the test's nodeid in a contextvar
    that the recording system can access to determine which subdirectory to use.
    """
    from llama_stack.testing.api_recorder import _test_context
    from llama_stack.core.testing_context import reset_test_context, set_test_context

    # Store the test nodeid (e.g., "tests/integration/responses/test_basic.py::test_foo[params]")
    token = _test_context.set(request.node.nodeid)
    token = set_test_context(request.node.nodeid)

    yield

    # Cleanup
    _test_context.reset(token)
    reset_test_context(token)


def pytest_runtest_teardown(item):
@@ -41,32 +41,33 @@ basic_test_cases = [
        ),
        id="saturn",
    ),
    pytest.param(
        ResponsesTestCase(
            input=[
                {
                    "role": "user",
                    "content": [
                        {
                            "type": "input_text",
                            "text": "what teams are playing in this image?",
                        }
                    ],
                },
                {
                    "role": "user",
                    "content": [
                        {
                            "type": "input_image",
                            "image_url": "https://upload.wikimedia.org/wikipedia/commons/3/3b/LeBron_James_Layup_%28Cleveland_vs_Brooklyn_2018%29.jpg",
                        }
                    ],
                },
            ],
            expected="brooklyn nets",
        ),
        id="image_input",
    ),
    # TODO: Add image input test case, since this test case got Nerfed by OpenAI with a refusal
    # pytest.param(
    #     ResponsesTestCase(
    #         input=[
    #             {
    #                 "role": "user",
    #                 "content": [
    #                     {
    #                         "type": "input_text",
    #                         "text": "what teams are playing in this image?",
    #                     }
    #                 ],
    #             },
    #             {
    #                 "role": "user",
    #                 "content": [
    #                     {
    #                         "type": "input_image",
    #                         "image_url": "https://upload.wikimedia.org/wikipedia/commons/3/3b/LeBron_James_Layup_%28Cleveland_vs_Brooklyn_2018%29.jpg",
    #                     }
    #                 ],
    #             },
    #         ],
    #         expected="brooklyn nets",
    #     ),
    #     id="image_input",
    # ),
]

# Multi-turn test cases
@@ -0,0 +1,372 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_aPe1vS1v5bIwPgl789D5bfmW",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_aPe1vS1v5bIwPgl789D5bfmW",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-690481987689, score: 2.5781234969335522, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-690481987689', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-690481987689|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-690481987689|>."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Can you tell me more about the architecture?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_UqyVZyvMh30eQuiKg7lJIUhQ",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model architecture\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_UqyVZyvMh30eQuiKg7lJIUhQ",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-690481987689, score: 1.9327567816402336, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-690481987689', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-690481987689|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model architecture\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "MR85AdN9cL5"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The Llama ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "PU1MvlGgbp"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4 Maverick model's architecture utilizes a mixture of experts,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "j5k5TwcdwAGF"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " comprising ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "RY"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128 experts <|file-690481987689|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "zfJzUoSQia"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Unfortunately,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "myxQ8DZiq0bzUFn"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " the retrieved data did not provide more detailed information beyond this specific aspect of the architecture.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "tqzrfbJ9V"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Would you like me to search for additional details?",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "p8JOT5P"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-07b6475c4213",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "tA2QeAL"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@@ -0,0 +1,219 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_aPe1vS1v5bIwPgl789D5bfmW",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_aPe1vS1v5bIwPgl789D5bfmW",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-690481987689, score: 2.5781234969335522, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-690481987689', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-690481987689|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-0fd12925d27e",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "SgS7tknb3vK"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-0fd12925d27e",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The Llama ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Rf8yvaq0Sm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-0fd12925d27e",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4 Maverick model has ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "KwthQ5gO2NxL"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-0fd12925d27e",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128 experts in its mixture of experts architecture <|file-690481987689|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "wmUFlFBAXg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-0fd12925d27e",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "El7QNNB"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,600 @@
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[openai_client-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_aPe1vS1v5bIwPgl789D5bfmW",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_aPe1vS1v5bIwPgl789D5bfmW",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-690481987689, score: 2.5781234969335522, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-690481987689', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-690481987689|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-690481987689|>."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Can you tell me more about the architecture?"
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_UqyVZyvMh30eQuiKg7lJIUhQ",
|
||||
"function": {
|
||||
"arguments": "",
|
||||
"name": "knowledge_search"
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "L"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "{\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "query",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "rOZPdKYIRKVpUK"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\":\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "KbnFcNwgyiUhKq"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "L",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Wj"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "lama",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "2HMMQqdTQMzWCVE"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " ",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "eF"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "4",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "1C"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " Maver",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "BDs9l6hlr8pF5"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "ick",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " model",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "k0Bkf9cbzy8r0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " architecture",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "sxpQee"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\"}",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-127a97b42f23",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "tool_calls",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "j"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,779 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_or[openai_client-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Show me marketing and sales documents"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_Fvpcub7CINR4M4mne3NrqPSh",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\": \"marketing documents examples\"}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"index": 1,
|
||||
"id": "call_nNONu48z4o4psZIlHNDbWIe6",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\": \"sales documents examples\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_Fvpcub7CINR4M4mne3NrqPSh",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-862946202236, score: 0.7656461893973538, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-862946202236', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-862946202236|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-862946202234, score: 0.729409669664322, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-862946202234', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-862946202234|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[3] document_id: file-862946202237, score: 0.5528548752393603, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-862946202237', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-862946202237|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"marketing documents examples\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_nNONu48z4o4psZIlHNDbWIe6",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-862946202234, score: 0.6624775971970099, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-862946202234', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-862946202234|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-862946202236, score: 0.6241908355663578, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-862946202236', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-862946202236|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[3] document_id: file-862946202237, score: 0.5761999414500201, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-862946202237', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-862946202237|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"sales documents examples\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "R6uA5YGaVXy"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "Here are some examples of marketing and sales documents:\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "fvEM0zzsT"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "### Marketing Documents:\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "fM8pbIH2Du"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ywGbbKNyMkSs"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **European Advertising Campaign**:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "9Df30NePsv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " This document reports on the results of European advertising campaigns for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "1KDOAYrg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Ur6YZUx8o2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " highlighting strong growth in EU markets <|file-862946202236|>.\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "bGNEay3i7CbT"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **US Promotional Campaigns**:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "BcVg7O8xUL"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " It details the marketing activities in the US for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "lJMGEMwQQW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "fWLN9vPA4l"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " showing a ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "hcpod"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% revenue increase in the region <|file-862946202234|>",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "zSOh8pStGzP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ".\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "4FFmHXhr"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "### Sales Documents:\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "f2CxS7gaPY"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "hKaxDGxBdyNL"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **Asia Pacific Revenue Figures**:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "MBZgU7ywir"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " This document provides sales figures for the Asia Pacific region for Q3 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "6qKqjjyo"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "KRSPEkdDeP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " noting a record-breaking quarter in Asia <|file-862946202237|>",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "wCkeab"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ".\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "0FVU0V3j"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "These documents provide insights into regional marketing strategies and sales performance over specific periods.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "F5QDS9Hw"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-1dbd102b81c7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "rOg1C0x"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,192 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_and[client_with_models-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What are the engineering updates from the US?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_toDXiP4WpUNQvvvLkTGEbQAr",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"latest engineering updates US 2023\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_toDXiP4WpUNQvvvLkTGEbQAr",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-710505118847, score: 1.4208534597848683, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-710505118847', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-710505118847|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"latest engineering updates US 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2443581bf4a7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "WaPYOiGjAoJ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2443581bf4a7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The latest engineering updates from the US include new features deployed in the region for Q2 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "wV3WlUqwS3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2443581bf4a7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023 <|file-710505118847|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "LACGsaGI88"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2443581bf4a7",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Rb1SZZK"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,219 @@
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[client_with_models-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536-llama_experts]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_90pCu8l9ITbz463ZJxhGGKm3",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_90pCu8l9ITbz463ZJxhGGKm3",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-36441599755, score: 2.933222791810999, attributes: {'filename': 'test_response_non_streaming_file_search.txt', 'document_id': 'file-36441599755', 'token_count': 10.0, 'metadata_token_count': 13.0} (cite as <|file-36441599755|>)\nLlama 4 Maverick has 128 experts\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2f3f766a9601",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ZcjznG6Yo8S"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2f3f766a9601",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The Llama ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "otwbPJWhEZ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2f3f766a9601",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4 Maverick model has ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "WOGpoDlX3rN1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2f3f766a9601",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128 experts <|file-36441599755|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "td4Hm9RbPE"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-2f3f766a9601",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "PMGTSWK"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
File diff suppressed because it is too large
@@ -0,0 +1,107 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_text_format[client_with_models-txt=openai/gpt-4o-text_format0]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What is the capital of France?"
|
||||
}
|
||||
],
|
||||
"stream": true
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-3d3c43d9096b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_eb3c3cb84d",
|
||||
"usage": null,
|
||||
"obfuscation": "F1K7h1lKsaT"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-3d3c43d9096b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The capital of France is Paris.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_eb3c3cb84d",
|
||||
"usage": null,
|
||||
"obfuscation": "Wk7AnksnRP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-3d3c43d9096b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_eb3c3cb84d",
|
||||
"usage": null,
|
||||
"obfuscation": "SyYnKNH"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,250 @@
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[client_with_models-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536-llama_experts_pdf]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_Zb3UPWm2DOVVTce3d5Uo21FX",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_Zb3UPWm2DOVVTce3d5Uo21FX",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-433408948870, score: 1.4947232325305748, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-433408948870', 'token_count': 98.0, 'metadata_token_count': 11.0} (cite as <|file-433408948870|>)\n, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-433408948870, score: 1.1417523389560924, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-433408948870', 'token_count': 498.0, 'metadata_token_count': 11.0} (cite as <|file-433408948870|>)\nLlama Stack \nLlama Stack Overview \nLlama Stack standardizes the core building blocks that simplify AI application development. It codifies best \npractices\n \nacross\n \nthe\n \nLlama\n \necosystem.\n \nMore\n \nspecifically,\n \nit\n \nprovides\n \u25cf Unified API layer for Inference, RAG, Agents, Tools, Safety, Evals, and Telemetry. \u25cf Plugin architecture to support the rich ecosystem of different API implementations in various \nenvironments,\n \nincluding\n \nlocal\n \ndevelopment,\n \non-premises,\n \ncloud,\n \nand\n \nmobile.\n \u25cf Prepackaged verified distributions which offer a one-stop solution for developers to get started quickly \nand\n \nreliably\n \nin\n \nany\n \nenvironment.\n \u25cf Multiple developer interfaces like CLI and SDKs for Python, Typescript, iOS, and Android. \u25cf Standalone applications as examples for how to build production-grade AI applications with Llama \nStack.\n \nLlama Stack Benefits \n\u25cf Flexible Options: Developers can choose their preferred infrastructure without changing APIs and enjoy \nflexible\n \ndeployment\n \nchoices.\n \u25cf Consistent Experience: With its unified APIs, Llama Stack makes it easier to build, test, and deploy AI \napplications\n \nwith\n \nconsistent\n \napplication\n \nbehavior.\n \u25cf Robust Ecosystem: Llama Stack is already integrated with distribution partners (cloud providers, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-4eb6e076a5b4",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ot9TaDQyHrX"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-4eb6e076a5b4",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The Llama ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "vJfFXqDKPB"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-4eb6e076a5b4",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4 Maverick model is a Mixture-of-Experts (MoE)",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "2LBRj2itsMHM"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-4eb6e076a5b4",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " model with ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "9Stbjbq"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-4eb6e076a5b4",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128 experts <|file-433408948870|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "KK4ebrfUqW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-4eb6e076a5b4",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "A1JTNrt"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@ -0,0 +1,202 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_text_format[openai_client-txt=openai/gpt-4o-text_format1]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What is the capital of France?"
|
||||
}
|
||||
],
|
||||
"response_format": {
|
||||
"type": "json_schema",
|
||||
"json_schema": {
|
||||
"name": "capitals",
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"capital": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"stream": true
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-53a5ba8ac823",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "zTAPKN8NLoS"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-53a5ba8ac823",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "{\"",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "3tfleEvigd"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-53a5ba8ac823",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "capital\":\"",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "FqmKd8"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-53a5ba8ac823",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The capital of France is Paris.\"",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "TV7xqUyDie"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-53a5ba8ac823",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "}",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Ohw3LeZf0TFe"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-53a5ba8ac823",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "8ByAOfX"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@ -0,0 +1,385 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_region[client_with_models-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What are the updates from the US region?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_oDB87lp9gQBu3Xr5LRKtgKvc",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"US news updates\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_oDB87lp9gQBu3Xr5LRKtgKvc",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-446066738450, score: 1.0038202356233785, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-446066738450', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-446066738450|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-446066738449, score: 0.7400703615803927, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-446066738449', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-446066738449|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"US news updates\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ww18bkXvZgv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The latest updates from the US region include:\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "rYyrxABNu6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "8FlLCah0MSt0"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Technical updates for Q2 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "phH"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023 with new features deployed in the US region <|file-446066738450|>.\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "jN19v4DXyu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "t74bLfabpn5g"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Successful promotional campaigns for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "1D"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ROy4TEmWrk"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " which resulted in a ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "QKZLoz3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% increase in revenue <|file-446066738449|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "5rcb3lTUi11"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-545bde720fe1",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "iWXlJa0"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
File diff suppressed because it is too large
|
|
@ -0,0 +1,725 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_or[client_with_models-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Show me marketing and sales documents"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_XJHVfGOgINI5ssxdPEkIzNGn",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\": \"marketing documents examples\"}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"index": 1,
|
||||
"id": "call_yU3cSA6ZVa05x581uiSYW8De",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\": \"sales documents examples\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_XJHVfGOgINI5ssxdPEkIzNGn",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-66545595857, score: 0.7657325734983355, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-66545595857', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-66545595857|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-66545595855, score: 0.729409669664322, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-66545595855', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-66545595855|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[3] document_id: file-66545595858, score: 0.5528548752393603, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-66545595858', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-66545595858|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"marketing documents examples\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_yU3cSA6ZVa05x581uiSYW8De",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 3 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-66545595855, score: 0.6624775971970099, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-66545595855', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-66545595855|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-66545595857, score: 0.624255076486169, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-66545595857', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-66545595857|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[3] document_id: file-66545595858, score: 0.5761999414500201, attributes: {'region': 'asia', 'category': 'sales', 'date': 1688169600.0, 'filename': 'asia_sales_q3.txt', 'document_id': 'file-66545595858', 'token_count': 17.0, 'metadata_token_count': 31.0} (cite as <|file-66545595858|>)\nAsia Pacific revenue figures for Q3 2023. Record breaking quarter in Asia.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"sales documents examples\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "sCS0WEObq9w"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "Here are some examples of marketing and sales documents:\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "sylalKfmX"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "**Marketing Documents:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "8ZEgciTllZd"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "**\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "dGMJfxh"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "tsPacaJ4eDPD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **European Marketing Document** - This document reports on the European advertising campaign results for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "UZuul9FFnh"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "lXJE322Ble"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " highlighting strong growth in EU markets <|file-66545595857|>",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ".\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Dy4yeYTT"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Yc1ZRuoEhLaD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **US Marketing Document** - This document details US promotional campaigns for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "wL0a8WrT8i"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "g8dTqUQ6iz"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " noting a ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "GnSI11"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% increase in revenue in the US region <|file-66545595855|>",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "XoC0MGmkMRD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ".\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ipOgSJrl"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "**Sales Documents:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ONNWxX50IkZ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "**\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "1x4JLxe"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "6MVOndt1oixv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **Asia-Pacific Sales Document** - This document provides revenue figures for the Asia Pacific region for Q3 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "NOhDOd2O2e"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Eog46AZ0tJ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " marking a record-breaking quarter in Asia <|file-66545595858|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "RTYDY"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-5d796fd48309",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "GETqYY9"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@ -0,0 +1,219 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_zS2WxgXWetjnlPt2MzH9Asrc",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_zS2WxgXWetjnlPt2MzH9Asrc",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-5217982280, score: 2.57802841833685, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-5217982280', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-5217982280|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-813ac454f8df",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "kAlKUdypard"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-813ac454f8df",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The Llama ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "U9LsCIESSE"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-813ac454f8df",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4 Maverick model has ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "wwn6wHEfw0FY"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-813ac454f8df",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128 experts in its mixture of experts architecture <|file-5217982280|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "k3KlnqkWxH"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-813ac454f8df",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "m4ogAxG"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
|
||||
|
|
@ -54,7 +54,7 @@
|
|||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_qzY7B7EArJwpMqLVer8kcAey",
|
||||
"id": "call_zS2WxgXWetjnlPt2MzH9Asrc",
|
||||
"function": {
|
||||
"arguments": "",
|
||||
"name": "knowledge_search"
|
||||
|
|
@ -74,7 +74,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "D"
|
||||
"obfuscation": "u"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -148,7 +148,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "CJiqORTeQGxjIe"
|
||||
"obfuscation": "RFdKkejzJ9k94o"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -185,7 +185,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "r6QBYD3kczuPXu"
|
||||
"obfuscation": "Hb81GJqWa4o3xK"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -222,7 +222,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "QV"
|
||||
"obfuscation": "1F"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -259,7 +259,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "pvqVubGjwneskfA"
|
||||
"obfuscation": "5EIhJTfkplhcF8b"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -296,7 +296,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "xn"
|
||||
"obfuscation": "hy"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -333,7 +333,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "kd"
|
||||
"obfuscation": "Bh"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -370,7 +370,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "sNtz6TsVqvTPw"
|
||||
"obfuscation": "qSHaXAuZaOsnR"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -444,7 +444,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "fBQyzFK1sVnTB"
|
||||
"obfuscation": "oeMApPOcLgemX"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -481,7 +481,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "0U81JnFrkqiB"
|
||||
"obfuscation": "iT8zweOYBMgE"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -555,7 +555,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "s9gav9QCV2Y"
|
||||
"obfuscation": "4QR8kd8nG1V"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
|
@ -619,7 +619,7 @@
|
|||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "g"
|
||||
"obfuscation": "S"
|
||||
}
|
||||
}
|
||||
],
|
||||
|
|
|
|||
|
|
@ -0,0 +1,507 @@
|
|||
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_date_range[client_with_models-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What happened in Q1 2023?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_a9Kxt1z020OLhioOdi8pVAl6",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"events in Q1 2023\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_a9Kxt1z020OLhioOdi8pVAl6",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-664068282482, score: 1.0007501503163803, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-664068282482', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-664068282482|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-664068282484, score: 0.9776032276748411, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-664068282484', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-664068282484|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"events in Q1 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_CwdXa6wdMO3Jbh0oB3NNlIjE",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"key global events Q1 2023\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_CwdXa6wdMO3Jbh0oB3NNlIjE",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-664068282484, score: 0.9341816085356718, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-664068282484', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-664068282484|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-664068282482, score: 0.9132392014678701, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-664068282482', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-664068282482|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"key global events Q1 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "e4Awy8Uo6Cv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "In Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "zODOR5zYmdo"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "yQvVB91rkr"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " marketing campaigns in the US and Europe showed positive results.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Kug"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " In the US,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "KgmGx1ov56"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " promotional campaigns led to a revenue increase of ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "V"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% <|file-664068282482|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "jMPOQcwcREa"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " In Europe,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "WSomtA0mMU"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " advertising campaigns resulted in strong growth in EU markets <|file-664068282484|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Z"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " However,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "9Y6mq"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " more details on political,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Bzwxk0pg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " economic,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "XOGl"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " or other significant global events weren't found in the current search results.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "M1ALADgkgW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8945025e663d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "NDfVBAJ"
|
||||
}
|
||||
}
|
||||
],
"is_streaming": true
},
"id_normalization_mapping": {}
}
@@ -0,0 +1,520 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_category[client_with_models-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Show me all marketing reports"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_sfoVogRyIflBvHjg8G9ZZ52v",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"marketing reports\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_sfoVogRyIflBvHjg8G9ZZ52v",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-630021438843, score: 0.7788970955409423, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-630021438843', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-630021438843|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-630021438845, score: 0.74726414959878, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-630021438845', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-630021438845|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"marketing reports\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "dxDNR1iXMJd"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "Here are the marketing reports I found:\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "80wR3f26X"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "6KXPlP3ySakz"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **US Marketing Report for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "e83B5CRJLU"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023**:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "9FeKTd1pUV"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " This report highlights that promotional campaigns in the US led to a ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "orXPwx1c"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% increase in revenue for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "SchCp9zvapp"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023 <|file-630021438843|>",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Sccnv1BaF3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ".\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Mu1UC4rL"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "1uBu918De7Ba"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **European Marketing Report for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "hH2y5VHDyZ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023**:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "AXvFJtxuCD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " This report discusses the results of advertising campaigns in Europe,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "PedxYuNe"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " noting strong growth in the EU markets during Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "pfJyqd"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023 <|file-630021438845|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "3UbqhbmFpR"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-8d448edc2c00",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "lYDLApf"
|
||||
}
|
||||
}
|
||||
],
"is_streaming": true
},
"id_normalization_mapping": {}
}
File diff suppressed because it is too large
@@ -0,0 +1,304 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_date_range[openai_client-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What happened in Q1 2023?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_PfY7bcXP30PsqQxRzUa1peOc",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Q1 2023 events\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_PfY7bcXP30PsqQxRzUa1peOc",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-151637398235, score: 1.028104385476394, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-151637398235', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-151637398235|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-151637398237, score: 0.9312513654699082, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-151637398237', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-151637398237|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Q1 2023 events\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Bqtevt5MPSd"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "In Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "dGD9ZZwkokA"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "8EOofwL33Y"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " promotional campaigns in the United States resulted in a ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "s"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% increase in revenue <|file-151637398235|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "0YA2WkTFqQv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Additionally,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " there was significant growth in the European markets due to successful advertising campaigns <|file-151637398237|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "cEKaL4U"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-abb544a92652",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "BqZbBfd"
|
||||
}
|
||||
}
|
||||
],
"is_streaming": true
},
"id_normalization_mapping": {}
}
@@ -0,0 +1,493 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_category[openai_client-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Show me all marketing reports"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_54ZqtMaB74gGLhQoT5Kxe1KN",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"marketing reports\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_54ZqtMaB74gGLhQoT5Kxe1KN",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-824569833174, score: 0.7788970955409423, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-824569833174', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-824569833174|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-824569833176, score: 0.74726414959878, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-824569833176', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-824569833176|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"marketing reports\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "S0wk0b3F0v8"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "Here are two marketing reports:\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "0rdc2O0d6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Mweu9w6gSOii"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **US Marketing Report**:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Q0qYYvVaIN"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " This report covers promotional campaigns for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "uIhtL8H7"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023 in the US region,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "94u0V02PhV"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " highlighting a revenue increase of ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% <|file-824569833174|>",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "XnUSenFGx0l"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ".\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "9utCu2QR"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "OKaYfCPWTlYx"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " **EU Marketing Report**:",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "srw7RTZWcQ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " This report details European advertising campaign results for Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "VZI08kBN"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Mwt9DGHXI1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " showing strong growth in EU markets <|file-824569833176|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "oYERz"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b615a3e10275",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ICphsuA"
|
||||
}
|
||||
}
|
||||
],
"is_streaming": true
},
"id_normalization_mapping": {}
}
@@ -0,0 +1,291 @@
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_zS2WxgXWetjnlPt2MzH9Asrc",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_zS2WxgXWetjnlPt2MzH9Asrc",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-5217982280, score: 2.57802841833685, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-5217982280', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-5217982280|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-5217982280|>."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Can you tell me more about the architecture?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_eC40X8CzoIfT4BUnCbmNGlbT",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model architecture\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_eC40X8CzoIfT4BUnCbmNGlbT",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-5217982280, score: 1.9325686555844606, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-5217982280', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-5217982280|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model architecture\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b6484b82fd6d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "YG2gnXGln3w"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b6484b82fd6d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The available information only mentions that the Llama ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "zsQxdAF7Bf"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b6484b82fd6d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4 Maverick model has a mixture of experts architecture with ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "RS3quzkVDo6p"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b6484b82fd6d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128 experts <|file-5217982280|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "AA2MyYWwqU"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b6484b82fd6d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " Further details about its architecture were not provided in the current data.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "PKOKN"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-b6484b82fd6d",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "bZqZWmb"
|
||||
}
|
||||
}
|
||||
],
"is_streaming": true
},
"id_normalization_mapping": {}
}
@@ -0,0 +1,354 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_compound_and[openai_client-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What are the engineering updates from the US?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_xmBCIHb34NLfvnRnt1wDXjie",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"engineering updates US October 2023\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_xmBCIHb34NLfvnRnt1wDXjie",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-90357487492, score: 1.708989789892816, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-90357487492', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-90357487492|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"engineering updates US October 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "QL1wD7Bp6RT"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The search for specific engineering updates from the US for October ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "fzZkmiEW1g"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023 did not return the desired results.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "cJGY5dUAi5"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " It seems that there are only technical updates available for earlier periods,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "KzI4vF1Wib"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " such as Q2 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "4vlbZJ3Y"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "fdLgA1uk9A"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " where new features were deployed in the US region <|file-90357487492|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "1NITGWv"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " For the most current information,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ElPjxSzYB"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " checking engineering news websites or official announcements is recommended.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "SpqI"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c060e0e12e3b",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "RT09BI6"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,250 @@
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536-llama_experts_pdf]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_zKJ7WgvCO3tx6yB0We1qI0d8",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_zKJ7WgvCO3tx6yB0We1qI0d8",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-728796232029, score: 1.4945131220963286, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-728796232029', 'token_count': 98.0, 'metadata_token_count': 11.0} (cite as <|file-728796232029|>)\n, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-728796232029, score: 1.1415676746925796, attributes: {'filename': 'llama_stack_and_models.pdf', 'document_id': 'file-728796232029', 'token_count': 498.0, 'metadata_token_count': 11.0} (cite as <|file-728796232029|>)\nLlama Stack \nLlama Stack Overview \nLlama Stack standardizes the core building blocks that simplify AI application development. It codifies best \npractices\n \nacross\n \nthe\n \nLlama\n \necosystem.\n \nMore\n \nspecifically,\n \nit\n \nprovides\n \u25cf Unified API layer for Inference, RAG, Agents, Tools, Safety, Evals, and Telemetry. \u25cf Plugin architecture to support the rich ecosystem of different API implementations in various \nenvironments,\n \nincluding\n \nlocal\n \ndevelopment,\n \non-premises,\n \ncloud,\n \nand\n \nmobile.\n \u25cf Prepackaged verified distributions which offer a one-stop solution for developers to get started quickly \nand\n \nreliably\n \nin\n \nany\n \nenvironment.\n \u25cf Multiple developer interfaces like CLI and SDKs for Python, Typescript, iOS, and Android. \u25cf Standalone applications as examples for how to build production-grade AI applications with Llama \nStack.\n \nLlama Stack Benefits \n\u25cf Flexible Options: Developers can choose their preferred infrastructure without changing APIs and enjoy \nflexible\n \ndeployment\n \nchoices.\n \u25cf Consistent Experience: With its unified APIs, Llama Stack makes it easier to build, test, and deploy AI \napplications\n \nwith\n \nconsistent\n \napplication\n \nbehavior.\n \u25cf Robust Ecosystem: Llama Stack is already integrated with distribution partners (cloud providers, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c4915bdee5a9",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "P4gkicgUKax"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c4915bdee5a9",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The Llama ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "PIPk8kAyow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c4915bdee5a9",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "4 Maverick model is a Mixture-of-Experts (MoE)",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "viELmOBhSxTU"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c4915bdee5a9",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " model with ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "M866QoC"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c4915bdee5a9",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "128 experts <|file-728796232029|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "s6h8QUwLvD"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-c4915bdee5a9",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ibcjGHS"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,600 @@
{
|
||||
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_file_search[client_with_models-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "How many experts does the Llama 4 Maverick model have?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_zS2WxgXWetjnlPt2MzH9Asrc",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_zS2WxgXWetjnlPt2MzH9Asrc",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-5217982280, score: 2.57802841833685, attributes: {'filename': 'test_sequential_file_search.txt', 'document_id': 'file-5217982280', 'token_count': 19.0, 'metadata_token_count': 11.0} (cite as <|file-5217982280|>)\nThe Llama 4 Maverick model has 128 experts in its mixture of experts architecture.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture <|file-5217982280|>."
|
||||
},
|
||||
{
|
||||
"role": "user",
|
||||
"content": "Can you tell me more about the architecture?"
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_eC40X8CzoIfT4BUnCbmNGlbT",
|
||||
"function": {
|
||||
"arguments": "",
|
||||
"name": "knowledge_search"
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "1"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "{\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "query",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "WaHwUohOgfnJOH"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\":\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "OWZ9cPJiOvgHp6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "L",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "0c"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "lama",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "rjJXIDI6BKl39ZI"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " ",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "iM"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "4",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "15"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " Maver",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "bsgmAqW9qyRkB"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "ick",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " model",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "0y5WwPvUi2J2l"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " architecture",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "8YsQud"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\"}",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d07dea167108",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "tool_calls",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "z"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,107 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_text_format[openai_client-txt=openai/gpt-4o-text_format0]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What is the capital of France?"
|
||||
}
|
||||
],
|
||||
"stream": true
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d1c19e856c52",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "GmDyFBTczH2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d1c19e856c52",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The capital of France is Paris.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "o24r1KRzNw"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d1c19e856c52",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "igzuBfl"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,466 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_region[openai_client-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What are the updates from the US region?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_tH5vt4iTTESjlL0vKwDKKLtU",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"latest updates from the US region\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_tH5vt4iTTESjlL0vKwDKKLtU",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-247992711531, score: 1.746917709229671, attributes: {'region': 'us', 'category': 'engineering', 'date': 1680307200.0, 'filename': 'us_engineering_q2.txt', 'document_id': 'file-247992711531', 'token_count': 18.0, 'metadata_token_count': 32.0} (cite as <|file-247992711531|>)\nUS technical updates for Q2 2023. New features deployed in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-247992711530, score: 0.9418284563822891, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-247992711530', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-247992711530|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"latest updates from the US region\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "LVBYUvJmYrP"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "Here are some recent updates related to the US region:\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "I0WY1gRg2"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "1.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "AFHfwzJAJYrn"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " In Q2 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "6Tu1Zkc5Cl"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "FM2xXQk4Xg"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " new technical features were deployed in the US region,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "JWXl941NV"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " indicating progress in engineering and technical advancements <|file-247992711531|>",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Cf"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": ".\n\n",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "rTeLdej6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "0CRQo167cVwS"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " For Q1 ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "io5dVAPR6"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "2023,",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "JEEfXMv3tZ"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": " there were notable marketing activities with promotional campaigns leading to a ",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "jS6I4Aw"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "15% increase in revenue in the US region <|file-247992711530|>.",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "UFLsAL2aZCu"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-d4b919f92aa0",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "Nk1jxtn"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,202 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_text_format[client_with_models-txt=openai/gpt-4o-text_format1]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What is the capital of France?"
|
||||
}
|
||||
],
|
||||
"response_format": {
|
||||
"type": "json_schema",
|
||||
"json_schema": {
|
||||
"name": "capitals",
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"capital": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"stream": true
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-de35ec2b1494",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "raD8PNyHuff"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-de35ec2b1494",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "{\"",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "bKNctm8IOW"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-de35ec2b1494",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "capital\":\"",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "sCtYBe"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-de35ec2b1494",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "The capital of France is Paris.\"",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "CfKJfvFsG3"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-de35ec2b1494",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": "}",
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "5CAMRCsJP8iK"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-de35ec2b1494",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "stop",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_cbf1785567",
|
||||
"usage": null,
|
||||
"obfuscation": "GqXk1WL"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
@@ -0,0 +1,596 @@
{
|
||||
"test_id": "tests/integration/responses/test_file_search.py::test_response_file_search_filter_by_date_range[client_with_models-txt=openai/gpt-4o]",
|
||||
"request": {
|
||||
"method": "POST",
|
||||
"url": "https://api.openai.com/v1/v1/chat/completions",
|
||||
"headers": {},
|
||||
"body": {
|
||||
"model": "gpt-4o",
|
||||
"messages": [
|
||||
{
|
||||
"role": "user",
|
||||
"content": "What happened in Q1 2023?"
|
||||
},
|
||||
{
|
||||
"role": "assistant",
|
||||
"content": "",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_a9Kxt1z020OLhioOdi8pVAl6",
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"arguments": "{\"query\":\"events in Q1 2023\"}"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"role": "tool",
|
||||
"tool_call_id": "call_a9Kxt1z020OLhioOdi8pVAl6",
|
||||
"content": [
|
||||
{
|
||||
"type": "text",
|
||||
"text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[1] document_id: file-664068282482, score: 1.0007501503163803, attributes: {'region': 'us', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'us_marketing_q1.txt', 'document_id': 'file-664068282482', 'token_count': 21.0, 'metadata_token_count': 32.0} (cite as <|file-664068282482|>)\nUS promotional campaigns for Q1 2023. Revenue increased by 15% in the US region.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "[2] document_id: file-664068282484, score: 0.9776032276748411, attributes: {'region': 'eu', 'category': 'marketing', 'date': 1672531200.0, 'filename': 'eu_marketing_q1.txt', 'document_id': 'file-664068282484', 'token_count': 17.0, 'metadata_token_count': 32.0} (cite as <|file-664068282484|>)\nEuropean advertising campaign results for Q1 2023. Strong growth in EU markets.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "END of knowledge_search tool results.\n"
|
||||
},
|
||||
{
|
||||
"type": "text",
|
||||
"text": "The above results were retrieved to help answer the user's query: \"events in Q1 2023\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"stream": true,
|
||||
"tools": [
|
||||
{
|
||||
"type": "function",
|
||||
"function": {
|
||||
"name": "knowledge_search",
|
||||
"description": "Search for information in a database.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"query": {
|
||||
"type": "string",
|
||||
"description": "The query to search for. Can be a natural language sentence or keywords."
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"query"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"endpoint": "/v1/chat/completions",
|
||||
"model": "gpt-4o"
|
||||
},
|
||||
"response": {
|
||||
"body": [
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": "assistant",
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": "call_CwdXa6wdMO3Jbh0oB3NNlIjE",
|
||||
"function": {
|
||||
"arguments": "",
|
||||
"name": "knowledge_search"
|
||||
},
|
||||
"type": "function"
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "r"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "{\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "query",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "sJPYFmJf6VNnKV"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\":\"",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "hPZ5ojU9O1JCwk"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "key",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " global",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "PnDPR1Y8zCab"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " events",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "ePyNFvW19GBw"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " Q",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "H"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "1",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "4D"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": " ",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "IA"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "202",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "3",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "qR"
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": [
|
||||
{
|
||||
"index": 0,
|
||||
"id": null,
|
||||
"function": {
|
||||
"arguments": "\"}",
|
||||
"name": null
|
||||
},
|
||||
"type": null
|
||||
}
|
||||
]
|
||||
},
|
||||
"finish_reason": null,
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": ""
|
||||
}
|
||||
},
|
||||
{
|
||||
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
||||
"__data__": {
|
||||
"id": "rec-e69443fa2e44",
|
||||
"choices": [
|
||||
{
|
||||
"delta": {
|
||||
"content": null,
|
||||
"function_call": null,
|
||||
"refusal": null,
|
||||
"role": null,
|
||||
"tool_calls": null
|
||||
},
|
||||
"finish_reason": "tool_calls",
|
||||
"index": 0,
|
||||
"logprobs": null
|
||||
}
|
||||
],
|
||||
"created": 0,
|
||||
"model": "gpt-4o-2024-08-06",
|
||||
"object": "chat.completion.chunk",
|
||||
"service_tier": "default",
|
||||
"system_fingerprint": "fp_f64f290af2",
|
||||
"usage": null,
|
||||
"obfuscation": "n"
|
||||
}
|
||||
}
|
||||
],
|
||||
"is_streaming": true
|
||||
},
|
||||
"id_normalization_mapping": {}
|
||||
}
File diff suppressed because it is too large
@@ -0,0 +1,219 @@
{
  "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=openai/text-embedding-3-small:dim=1536-llama_experts]",
  "request": {
    "method": "POST",
    "url": "https://api.openai.com/v1/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "gpt-4o",
      "messages": [
        {
          "role": "user",
          "content": "How many experts does the Llama 4 Maverick model have?"
        },
        {
          "role": "assistant",
          "content": "",
          "tool_calls": [
            {
              "index": 0,
              "id": "call_9Ofp0Uepi3uOnEmuFtm9yvCU",
              "type": "function",
              "function": {
                "name": "knowledge_search",
                "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
              }
            }
          ]
        },
        {
          "role": "tool",
          "tool_call_id": "call_9Ofp0Uepi3uOnEmuFtm9yvCU",
          "content": [
            {
              "type": "text",
              "text": "knowledge_search tool found 1 chunks:\nBEGIN of knowledge_search tool results.\n"
            },
            {
              "type": "text",
              "text": "[1] document_id: file-78420035045, score: 2.933222791810999, attributes: {'filename': 'test_response_non_streaming_file_search.txt', 'document_id': 'file-78420035045', 'token_count': 10.0, 'metadata_token_count': 13.0} (cite as <|file-78420035045|>)\nLlama 4 Maverick has 128 experts\n"
            },
            {
              "type": "text",
              "text": "END of knowledge_search tool results.\n"
            },
            {
              "type": "text",
              "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
            }
          ]
        }
      ],
      "stream": true,
      "tools": [
        {
          "type": "function",
          "function": {
            "name": "knowledge_search",
            "description": "Search for information in a database.",
            "parameters": {
              "type": "object",
              "properties": {
                "query": {
                  "type": "string",
                  "description": "The query to search for. Can be a natural language sentence or keywords."
                }
              },
              "required": [
                "query"
              ]
            }
          }
        }
      ]
    },
    "endpoint": "/v1/chat/completions",
    "model": "gpt-4o"
  },
  "response": {
    "body": [
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-fbc3663df932", "choices": [{"delta": {"content": "", "function_call": null, "refusal": null, "role": "assistant", "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_f64f290af2", "usage": null, "obfuscation": "jPCn755gSJm"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-fbc3663df932", "choices": [{"delta": {"content": "The Llama ", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_f64f290af2", "usage": null, "obfuscation": "7DiMbaNjQh"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-fbc3663df932", "choices": [{"delta": {"content": "4 Maverick model has ", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_f64f290af2", "usage": null, "obfuscation": "YHm5Z1DAbROO"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-fbc3663df932", "choices": [{"delta": {"content": "128 experts <|file-78420035045|>.", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_f64f290af2", "usage": null, "obfuscation": "BC68VJcdY6"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-fbc3663df932", "choices": [{"delta": {"content": null, "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": "stop", "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_f64f290af2", "usage": null, "obfuscation": "2y0NJct"}}
    ],
    "is_streaming": true
  },
  "id_normalization_mapping": {}
}