feat(tests): enable MCP tests in server mode (#4146)

We would like to run all OpenAI compatibility tests using only the
openai-client library. This is the most contributor-friendly approach, since
contributors can run the tests without needing to update the client SDKs
(which is getting easier, but is still a long pole).

This is the first step toward enabling that -- stop using the "library client"
for any of the Responses tests. This seems like a reasonable trade-off, since
using an embeddable library client for Responses (or any other
OpenAI-compatible behavior) appears to be uncommon. To do this, we needed to
enable the MCP tests (which previously only worked in library client mode) in
server mode.
Ashwin Bharambe 2025-11-13 07:23:23 -08:00 committed by GitHub
parent 9eb81439d2
commit 1e81056a22
29 changed files with 13388 additions and 127 deletions


@@ -162,6 +162,17 @@ if [[ "$COLLECT_ONLY" == false ]]; then
export LLAMA_STACK_TEST_STACK_CONFIG_TYPE="library_client"
echo "Setting stack config type: library_client"
fi
# Set MCP host for in-process MCP server tests
# - For library client and server mode: localhost (both on same host)
# - For docker mode: host.docker.internal (container needs to reach host)
if [[ "$STACK_CONFIG" == docker:* ]]; then
export LLAMA_STACK_TEST_MCP_HOST="host.docker.internal"
echo "Setting MCP host: host.docker.internal (docker mode)"
else
export LLAMA_STACK_TEST_MCP_HOST="localhost"
echo "Setting MCP host: localhost (library/server mode)"
fi
fi
SETUP_ENV=$(PYTHONPATH=$THIS_DIR/.. python "$THIS_DIR/get_setup_env.py" --suite "$TEST_SUITE" --setup "$TEST_SETUP" --format bash)
@@ -338,6 +349,7 @@ if [[ "$STACK_CONFIG" == *"docker:"* && "$COLLECT_ONLY" == false ]]; then
DOCKER_ENV_VARS=""
DOCKER_ENV_VARS="$DOCKER_ENV_VARS -e LLAMA_STACK_TEST_INFERENCE_MODE=$INFERENCE_MODE"
DOCKER_ENV_VARS="$DOCKER_ENV_VARS -e LLAMA_STACK_TEST_STACK_CONFIG_TYPE=server"
DOCKER_ENV_VARS="$DOCKER_ENV_VARS -e LLAMA_STACK_TEST_MCP_HOST=${LLAMA_STACK_TEST_MCP_HOST:-host.docker.internal}"
# Disabled: https://github.com/llamastack/llama-stack/issues/4089
#DOCKER_ENV_VARS="$DOCKER_ENV_VARS -e OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:${COLLECTOR_PORT}"
DOCKER_ENV_VARS="$DOCKER_ENV_VARS -e OTEL_METRIC_EXPORT_INTERVAL=200"
@@ -371,8 +383,11 @@ if [[ "$STACK_CONFIG" == *"docker:"* && "$COLLECT_ONLY" == false ]]; then
# Use regular port mapping instead
NETWORK_MODE=""
PORT_MAPPINGS=""
ADD_HOST_FLAG=""
if [[ "$(uname)" != "Darwin" ]] && [[ "$(uname)" != *"MINGW"* ]]; then if [[ "$(uname)" != "Darwin" ]] && [[ "$(uname)" != *"MINGW"* ]]; then
NETWORK_MODE="--network host" NETWORK_MODE="--network host"
# On Linux with host network, also add host.docker.internal mapping for consistency
ADD_HOST_FLAG="--add-host=host.docker.internal:host-gateway"
else
# On non-Linux (macOS, Windows), need explicit port mappings for both app and telemetry
PORT_MAPPINGS="-p $LLAMA_STACK_PORT:$LLAMA_STACK_PORT -p $COLLECTOR_PORT:$COLLECTOR_PORT"
@@ -381,6 +396,7 @@ if [[ "$STACK_CONFIG" == *"docker:"* && "$COLLECT_ONLY" == false ]]; then
docker run -d $NETWORK_MODE --name "$container_name" \
$PORT_MAPPINGS \
$ADD_HOST_FLAG \
$DOCKER_ENV_VARS \
"$IMAGE_NAME" \
--port $LLAMA_STACK_PORT
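
Aside on the `--add-host=host.docker.internal:host-gateway` flag: on Linux, `host.docker.internal` does not resolve inside containers by default, so the script maps it to the host gateway explicitly (Docker Desktop on macOS/Windows injects it automatically). A minimal, illustrative check that the mapping is in place -- this is not part of the change, just a sketch:

import socket

# Resolvable only if docker run passed --add-host=host.docker.internal:host-gateway
# (Linux) or Docker Desktop injected the entry (macOS/Windows).
try:
    addr = socket.gethostbyname("host.docker.internal")
    print(f"host.docker.internal -> {addr}; the MCP server running on the host is reachable")
except socket.gaierror:
    print("host.docker.internal does not resolve; docker-mode MCP tests would fail here")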


@@ -244,8 +244,14 @@ def make_mcp_server(required_auth_token: str | None = None, tools: dict[str, Cal
    timeout = 2
    start_time = time.time()
    # Determine the appropriate host for the server URL based on test environment
    # - For library client and server mode: use localhost (both on same host)
    # - For docker mode: use host.docker.internal (container needs to reach host)
    import os
    mcp_host = os.environ.get("LLAMA_STACK_TEST_MCP_HOST", "localhost")
    server_url = f"http://{mcp_host}:{port}/sse"
    logger.debug(f"Waiting for MCP server thread to start on port {port} (accessible via {mcp_host})")
    while time.time() - start_time < timeout:
        if server_thread.is_alive():
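
Distilled, the host selection above reduces to a few lines. A standalone sketch for reference -- the environment variable name comes from this change, while the helper name and port are illustrative:

import os


def mcp_server_url(port: int) -> str:
    """Build the SSE URL that tests hand to the stack for the in-process MCP server."""
    # library client / server mode -> localhost (test and stack share a host)
    # docker mode -> host.docker.internal (the stack container must reach back to the host)
    host = os.environ.get("LLAMA_STACK_TEST_MCP_HOST", "localhost")
    return f"http://{host}:{port}/sse"


print(mcp_server_url(9000))  # e.g. http://localhost:9000/sse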


@@ -0,0 +1,17 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
import pytest

from llama_stack.core.library_client import LlamaStackAsLibraryClient


@pytest.fixture
def responses_client(compat_client):
    """Provide a client for responses tests, skipping library client mode."""
    if isinstance(compat_client, LlamaStackAsLibraryClient):
        pytest.skip("Responses API tests are not supported in library client mode")
    return compat_client
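
Any Responses test that requests this fixture is skipped automatically when the suite runs against the library client. A hypothetical usage sketch (the test name, model fixture, and prompt are illustrative; the `responses.create` / `output_text` calls follow the standard OpenAI client):

def test_basic_response(responses_client, text_model_id):
    # text_model_id is an assumed fixture providing any available model id
    response = responses_client.responses.create(
        model=text_model_id,
        input="Say hello in one word.",
    )
    assert response.output_text.strip()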


@@ -0,0 +1,549 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-experiment_analysis_streaming]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need a complete analysis: First, get the experiment ID for 'chemical_reaction', then get the results for that experiment, and tell me if the yield was above 80%. Return only one tool call per step. Please stream your analysis process."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_Q9Gcxub7UbQsxJWVkiy4FETr",
"type": "function",
"function": {
"name": "get_experiment_id",
"arguments": "{\"experiment_name\":\"chemical_reaction\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_Q9Gcxub7UbQsxJWVkiy4FETr",
"content": [
{
"type": "text",
"text": "exp_003"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_yTMuQEKu7x115q8XvhqelRub",
"function": {
"arguments": "",
"name": "get_experiment_results"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "9CSOZwfG5M7nid"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Wss"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "experiment",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "5AmVsa0S6NBy"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_id",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2Sf"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "z"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "exp",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "leu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "omxpR"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "003",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "kW6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Zm6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "aXvC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-0a4aca0cd075",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 19,
"prompt_tokens": 457,
"total_tokens": 476,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "s13YHOCCaCDcJ"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,295 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-user_file_access_check]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need to check if user 'alice' can access the file 'document.txt'. First, get alice's user ID, then check if that user ID can access the file 'document.txt'. Do this as a series of steps, where each step is a separate message. Return only one tool call per step. Summarize the final result with a single 'yes' or 'no' response."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_EsVvmBUqtJb42kNkYnK19QkJ",
"type": "function",
"function": {
"name": "get_user_id",
"arguments": "{\"username\":\"alice\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_EsVvmBUqtJb42kNkYnK19QkJ",
"content": [
{
"type": "text",
"text": "user_12345"
}
]
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_kCmSE8ORKfQoiEsW2UCYr5Sh",
"type": "function",
"function": {
"name": "check_file_access",
"arguments": "{\"user_id\":\"user_12345\",\"filename\":\"document.txt\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_kCmSE8ORKfQoiEsW2UCYr5Sh",
"content": [
{
"type": "text",
"text": "yes"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2bd4c8dc08b3",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "UxHf8fChwO3CUY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2bd4c8dc08b3",
"choices": [
{
"delta": {
"content": "yes",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "GOexNEhopELIg"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2bd4c8dc08b3",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "O41d8hC8zD"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2bd4c8dc08b3",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 2,
"prompt_tokens": 516,
"total_tokens": 518,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "9VQklZAZMYAfa0"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,833 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-user_permissions_workflow]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Help me with this security check: First, get the user ID for 'charlie', then get the permissions for that user ID, and finally check if that user can access 'secret_file.txt'. Stream your progress as you work through each step. Return only one tool call per step. Summarize the final result with a single 'yes' or 'no' response."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_fsxGbKmceUbLSXCe4sx9WLXO",
"type": "function",
"function": {
"name": "get_user_id",
"arguments": "{\"username\":\"charlie\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_fsxGbKmceUbLSXCe4sx9WLXO",
"content": [
{
"type": "text",
"text": "user_11111"
}
]
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_moRBxqnBJ48EWTSEoQ1llgib",
"type": "function",
"function": {
"name": "get_user_permissions",
"arguments": "{\"user_id\":\"user_11111\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_moRBxqnBJ48EWTSEoQ1llgib",
"content": [
{
"type": "text",
"text": "admin"
}
]
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_ybUqAP9oQn3rwQqVdOLs5Wb4",
"type": "function",
"function": {
"name": "check_file_access",
"arguments": "{\"user_id\":\"user_11111\",\"filename\":\"secret_file.txt\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_ybUqAP9oQn3rwQqVdOLs5Wb4",
"content": [
{
"type": "text",
"text": "no"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "WLGSIGDbuImIc2"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "tOPrT8GpCzqCn"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " user",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ViOvVDT7owF"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " '",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "EkiYJGYtRb2KCr"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "char",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ioC2G58DuWTx"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "lie",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "A5rxByl55APwi"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "'",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "kmDNWRqOyy2r3ST"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " cannot",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "JHGD4XKFC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " access",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "6IPkFhs93"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " '",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "LGHjKnVq2lF1DS"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "secret",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1nGoXVjnK0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "_file",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "OeR7YlvZQLa"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": ".txt",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "yLKHaSgjE64R"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": "'.",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "waZY1Js7DPWtoN"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "km3Gr5HspErW"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " final",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Mvzf8AUstX"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " result",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "660CrCPne"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "lq7NyKvIo8UEO"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": ":",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "qjIz07y1RQsKqTo"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": " no",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "xhcVwxM4RaQcN"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "dPxBJZ3WUesIy8T"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Z9wFfcEaK2"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2ed23a428984",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 21,
"prompt_tokens": 542,
"total_tokens": 563,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "fSoZk1lrb3nJt"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,759 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_mcp_tool_approval[openai_client-txt=openai/gpt-4o-True-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_bL84OWNnE1s75GJEqGLAK35W",
"function": {
"arguments": "",
"name": "get_boiling_point"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ptE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "UEV"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "li",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "hMko"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "nr"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_name",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "x"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "D"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "my",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "aLLC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "aw",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "EZdr"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "esom",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "yV"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "eli",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "0bj"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "5J"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\",\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "z"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "c",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "7dZEY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "elsius",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": ""
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "AqP"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "true",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "X8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "oa7h2"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1Is8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-3177a984c900",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 27,
"prompt_tokens": 156,
"total_tokens": 183,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "DfwHMdbjUVww7"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,549 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-experiment_results_lookup]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need to get the results for the 'boiling_point' experiment. First, get the experiment ID for 'boiling_point', then use that ID to get the experiment results. Tell me the boiling point in Celsius."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_dZwjBxH3aTRhnaS0bJVPqRcz",
"type": "function",
"function": {
"name": "get_experiment_id",
"arguments": "{\"experiment_name\":\"boiling_point\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_dZwjBxH3aTRhnaS0bJVPqRcz",
"content": [
{
"type": "text",
"text": "exp_004"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_skNUKbERbtdoADH834U9OE91",
"function": {
"arguments": "",
"name": "get_experiment_results"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "5aHvu2xes6Amy8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "9HQ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "experiment",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ckAh5OXg9JIe"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_id",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "avh"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "x"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "exp",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "f75"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Nini1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "004",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "MXB"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Vc4"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "rnph"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-318c5361647d",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 19,
"prompt_tokens": 450,
"total_tokens": 469,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "nUptVmnQlQZrH"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,413 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-user_file_access_check]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need to check if user 'alice' can access the file 'document.txt'. First, get alice's user ID, then check if that user ID can access the file 'document.txt'. Do this as a series of steps, where each step is a separate message. Return only one tool call per step. Summarize the final result with a single 'yes' or 'no' response."
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_EsVvmBUqtJb42kNkYnK19QkJ",
"function": {
"arguments": "",
"name": "get_user_id"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Ma7aiZxSs"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "DXu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "username",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "rtfrl7gxu80vmN"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "r"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "alice",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "M"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "vSu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "sXfh"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-430a49246c97",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 15,
"prompt_tokens": 454,
"total_tokens": 469,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "bEe7hWJ6U62YQ"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,586 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-user_permissions_workflow]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Help me with this security check: First, get the user ID for 'charlie', then get the permissions for that user ID, and finally check if that user can access 'secret_file.txt'. Stream your progress as you work through each step. Return only one tool call per step. Summarize the final result with a single 'yes' or 'no' response."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_fsxGbKmceUbLSXCe4sx9WLXO",
"type": "function",
"function": {
"name": "get_user_id",
"arguments": "{\"username\":\"charlie\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_fsxGbKmceUbLSXCe4sx9WLXO",
"content": [
{
"type": "text",
"text": "user_11111"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_moRBxqnBJ48EWTSEoQ1llgib",
"function": {
"arguments": "",
"name": "get_user_permissions"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": ""
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "00p"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "user",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Y0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_id",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "i2I"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "P"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "user",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "IG"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "QY61l"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "111",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "YAZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "11",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Nw7U"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Ev7"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "CSaD"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-52a2b9678196",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 19,
"prompt_tokens": 478,
"total_tokens": 497,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "kMNEyeKFT75vK"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,524 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-experiment_results_lookup]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need to get the results for the 'boiling_point' experiment. First, get the experiment ID for 'boiling_point', then use that ID to get the experiment results. Tell me the boiling point in Celsius."
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_dZwjBxH3aTRhnaS0bJVPqRcz",
"function": {
"arguments": "",
"name": "get_experiment_id"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "W3B"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "L7n"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "experiment",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "lXUc0FKJkRea"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_name",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "D"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "bo",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "3dUQ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "iling",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_point",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": ""
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "48i"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "eQyU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-541b5db7789e",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 19,
"prompt_tokens": 418,
"total_tokens": 437,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "5tVrc5IEigum8"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,614 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_mcp_tool[openai_client-txt=openai/gpt-4o-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_8kf8fNIDcWOelbCmUEcretON",
"type": "function",
"function": {
"name": "get_boiling_point",
"arguments": "{\"liquid_name\":\"myawesomeliquid\",\"celsius\":true}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_8kf8fNIDcWOelbCmUEcretON",
"content": [
{
"type": "text",
"text": "-100"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "QvigjcdULEdran"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "sIHyVud88f1Ri"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": " boiling",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "L46IcJeM"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": " point",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "j0afpRCRBL"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": " of",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "tuzBzZB7jURPj"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": " \"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "iq6vUNVBRuRH5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "my",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Nkkz9uUPfhHdqZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "aw",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "oR3PEQpsXLwYOJ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "esom",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "VBFf1ewix1rj"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "eli",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "yEx3rYoaZjsTw"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "quid",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "I6VR8wzPmnpa"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "\"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "xld69F07KIb2Yc"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "GKgtQZJiWLVKj"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": " -",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1by4tgiJqNgaI1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "100",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2RdP6HDQApUpN"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": "\u00b0C",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "21ABialEpJBCcX"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "uoaaRgmiGLD815k"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "QKEKTjUUam"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6a05cad89f13",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 17,
"prompt_tokens": 195,
"total_tokens": 212,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "ceWQr6uzZRuj3"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,574 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_mcp_tool_approval[openai_client-txt=openai/gpt-4o-False-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_4ldOwO71od1E0lrdgYQCoe2e",
"function": {
"arguments": "",
"name": "get_boiling_point"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "TdV"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "L5f"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "li",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "qo3z"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "i3"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_name",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Z"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Z"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "my",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "QdX5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "aw",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "sJYi"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "esom",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Yk"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "eli",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "pnS"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "y5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Tjs"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Cx0I"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-6d7f54b7be48",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 22,
"prompt_tokens": 156,
"total_tokens": 178,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "bmRrd4XLuhmCv"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,771 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-user_file_access_check]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need to check if user 'alice' can access the file 'document.txt'. First, get alice's user ID, then check if that user ID can access the file 'document.txt'. Do this as a series of steps, where each step is a separate message. Return only one tool call per step. Summarize the final result with a single 'yes' or 'no' response."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_EsVvmBUqtJb42kNkYnK19QkJ",
"type": "function",
"function": {
"name": "get_user_id",
"arguments": "{\"username\":\"alice\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_EsVvmBUqtJb42kNkYnK19QkJ",
"content": [
{
"type": "text",
"text": "user_12345"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_kCmSE8ORKfQoiEsW2UCYr5Sh",
"function": {
"arguments": "",
"name": "check_file_access"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "sCU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "iHp"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "user",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "3b"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_id",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "4hG"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "z"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "user",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "zX"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "WRFf5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "123",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PvE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "45",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "xak8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\",\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "v"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "filename",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "l7Rfy5le49BJu0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "p"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "document",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "EpFPZH128OUIsw"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": ".txt",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Zg"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "jH3"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "UubI"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-73c9287059db",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 24,
"prompt_tokens": 482,
"total_tokens": 506,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "GITY7sf69sAJd"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,759 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_mcp_tool[openai_client-txt=openai/gpt-4o-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_b5k2yeqIi5ucElnnrVPyYU4x",
"function": {
"arguments": "",
"name": "get_boiling_point"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "AhH"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "SMa"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "li",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "fBD0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "LL"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_name",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "h"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "my",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ySpU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "aw",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "fra1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "esom",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Hb"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "eli",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "INi"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "jF"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\",\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "i"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "c",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2dDeK"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "elsius",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": ""
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "DSb"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "true",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "vP"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "9boiy"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ZZRa"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-9f10c42f1338",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 27,
"prompt_tokens": 156,
"total_tokens": 183,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "HoutUcx6gZI1g"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,833 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-user_permissions_workflow]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Help me with this security check: First, get the user ID for 'charlie', then get the permissions for that user ID, and finally check if that user can access 'secret_file.txt'. Stream your progress as you work through each step. Return only one tool call per step. Summarize the final result with a single 'yes' or 'no' response."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_fsxGbKmceUbLSXCe4sx9WLXO",
"type": "function",
"function": {
"name": "get_user_id",
"arguments": "{\"username\":\"charlie\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_fsxGbKmceUbLSXCe4sx9WLXO",
"content": [
{
"type": "text",
"text": "user_11111"
}
]
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_moRBxqnBJ48EWTSEoQ1llgib",
"type": "function",
"function": {
"name": "get_user_permissions",
"arguments": "{\"user_id\":\"user_11111\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_moRBxqnBJ48EWTSEoQ1llgib",
"content": [
{
"type": "text",
"text": "admin"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_ybUqAP9oQn3rwQqVdOLs5Wb4",
"function": {
"arguments": "",
"name": "check_file_access"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "xpc"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "xXs"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "user",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "XY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_id",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "HbC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "f"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "user",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Ds"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Osfy3"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "111",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ioI"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "11",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "GQg6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\",\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "filename",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "b2qqKbGC68nHMB"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "H"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "secret",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": ""
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_file",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": ".txt",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Wz"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ImW"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "nRAE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-a97d8a2f2fd7",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 25,
"prompt_tokens": 507,
"total_tokens": 532,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "rgbYyZ54cN8La"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,524 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-experiment_analysis_streaming]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need a complete analysis: First, get the experiment ID for 'chemical_reaction', then get the results for that experiment, and tell me if the yield was above 80%. Return only one tool call per step. Please stream your analysis process."
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_Q9Gcxub7UbQsxJWVkiy4FETr",
"function": {
"arguments": "",
"name": "get_experiment_id"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "c8d"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "QoE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "experiment",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1krtmewG8p36"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_name",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "P"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "D"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "chemical",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "FoS4ov7pi99K5h"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_re",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "BhD"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "action",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": ""
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "KWC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PFmv"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b30da6311477",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 19,
"prompt_tokens": 425,
"total_tokens": 444,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "NYdC3zepOXLsO"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,649 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_mcp_tool[openai_client-txt=openai/gpt-4o-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_b5k2yeqIi5ucElnnrVPyYU4x",
"type": "function",
"function": {
"name": "get_boiling_point",
"arguments": "{\"liquid_name\":\"myawesomeliquid\",\"celsius\":true}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_b5k2yeqIi5ucElnnrVPyYU4x",
"content": [
{
"type": "text",
"text": "-100"
}
]
},
{
"role": "assistant",
"content": "The boiling point of \"myawesomeliquid\" is -100 degrees Celsius."
},
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "7S5XpbMeFTTZba"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "G4KYajpQCgm5p"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " boiling",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "krw8d3Np"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " point",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "sOEsvVtCEV"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " of",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "5eAw89OUrx7VT"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " \"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PFghmTocqCYea"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "my",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "IRJRbKIoXwNh0e"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "aw",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "wuoL6MoA21KfMP"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "esom",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "DLRS3D5YVekk"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "eli",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PQZQlOncwl01F"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "quid",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "TVfNNxYtZgXQ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "\"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "LscPqJGnbMf6Qw"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "X8NSrxHcpYYXL"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " -",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "5nfdb4DuFapoeT"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": "100",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "K2qXQYFAd591w"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " degrees",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "b0rvHdF1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": " Celsius",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "kFoGt52c"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "SJjhJwz2zgz693C"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "MityMxFgBz"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-b6b7282ca0ad",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 18,
"prompt_tokens": 234,
"total_tokens": 252,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "qf0j6dzuNPifV"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@@ -0,0 +1,450 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-user_permissions_workflow]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Help me with this security check: First, get the user ID for 'charlie', then get the permissions for that user ID, and finally check if that user can access 'secret_file.txt'. Stream your progress as you work through each step. Return only one tool call per step. Summarize the final result with a single 'yes' or 'no' response."
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_fsxGbKmceUbLSXCe4sx9WLXO",
"function": {
"arguments": "",
"name": "get_user_id"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "sOa6fZEKZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "HBO"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "username",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "7kcXlaglccmA8a"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "a"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "char",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "bS"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "lie",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "d2e"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\"}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "fhE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "SlsZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-c27df465b299",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 16,
"prompt_tokens": 449,
"total_tokens": 465,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "fjMWRTbF1Ni06"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -0,0 +1,759 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_mcp_tool[openai_client-txt=openai/gpt-4o-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_8kf8fNIDcWOelbCmUEcretON",
"function": {
"arguments": "",
"name": "get_boiling_point"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1xG"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "{\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "RQj"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "li",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "XncI"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "86"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "_name",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "L"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "my",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "lnSu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "aw",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ksr1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "esom",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "CU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "eli",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "hrv"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "quid",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "K9"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\",\"",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "a"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "c",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "LKw52"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "elsius",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": ""
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "\":",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "yGY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "true",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "wC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": [
{
"index": 0,
"id": null,
"function": {
"arguments": "}",
"name": null
},
"type": null
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "8fF8B"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "bbwp"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d35c1244fbbe",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 27,
"prompt_tokens": 156,
"total_tokens": 183,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "k0bo4JwUfLNKW"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -0,0 +1,808 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-experiment_analysis_streaming]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need a complete analysis: First, get the experiment ID for 'chemical_reaction', then get the results for that experiment, and tell me if the yield was above 80%. Return only one tool call per step. Please stream your analysis process."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_Q9Gcxub7UbQsxJWVkiy4FETr",
"type": "function",
"function": {
"name": "get_experiment_id",
"arguments": "{\"experiment_name\":\"chemical_reaction\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_Q9Gcxub7UbQsxJWVkiy4FETr",
"content": [
{
"type": "text",
"text": "exp_003"
}
]
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_yTMuQEKu7x115q8XvhqelRub",
"type": "function",
"function": {
"name": "get_experiment_results",
"arguments": "{\"experiment_id\":\"exp_003\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_yTMuQEKu7x115q8XvhqelRub",
"content": [
{
"type": "text",
"text": "Yield: 85%, Status: Complete"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "7yA3503fehs27D"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "T95BeWrgJQMHt"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " yield",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "VveNEnHuMQ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " for",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "KupSssWahehO"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " the",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Ogot8KLW0IXw"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " '",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "dYKJ6jPstuAso4"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "chemical",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "wcSKhZVd"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "_re",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "6ZlTlRGLyclHo"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "action",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "WpYqOmrhXr"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "'",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "qUhq7HrrwdFEyuY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " experiment",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "WWO2y"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "pFVMO1BRN37n4"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " ",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "TtQlcHeU2mPl830"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "85",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "zyw8OdA0pXZCp5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "%,",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "VcHVTGGXrqvev1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " which",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "FI9FAA2rX6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Cc65gPYGA6Xfd"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " above",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "T7BlLMIQGs"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": " ",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2oKThCybRdG8MzZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "80",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "QHWdJWXK6hzQVS"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": "%.",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "lJnplmQYyl0SL3"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "NPaAVrOB4J"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d42e1020edee",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 21,
"prompt_tokens": 494,
"total_tokens": 515,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "ngidabPDDHECm"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -0,0 +1,668 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_mcp_tool_approval[openai_client-txt=openai/gpt-4o-True-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_bL84OWNnE1s75GJEqGLAK35W",
"type": "function",
"function": {
"name": "get_boiling_point",
"arguments": "{\"liquid_name\":\"myawesomeliquid\",\"celsius\":true}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_bL84OWNnE1s75GJEqGLAK35W",
"content": [
{
"type": "text",
"text": "-100"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "STnb1nbwTsG4JZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "aEUUYMIYjnZpH"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " boiling",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2QzI8Zau"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " point",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "gZw7vp0bnu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " of",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "TYru3DcfZVc6B"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " \"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "h5P3cluszFa21"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "my",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ggSDGSgtWOR3d9"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "aw",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "lm72CS5Lt7lW76"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "esom",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "fKXRsLB1CG0e"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "eli",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "JxZBNjkfyXquH"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "quid",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "egtKHFRBAqZn"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "\"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "R7MdHaS5Rj2mMV"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " in",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "LydsYLrAIj6PU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " Celsius",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "4MmAUDk0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Ivlu4M0VfRH8b"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": " -",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "OfTmU32oCtMsuo"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "100",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "IUbbHa5oyIPjr"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": "\u00b0C",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "llluAF0LBNJIwi"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "LnUC3LPx43OfUbC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ULfebGmmMn"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e2dc09dc546d",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 19,
"prompt_tokens": 195,
"total_tokens": 214,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "w11BVXjZVXRtg"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -0,0 +1,700 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_multi_turn_tool_execution[openai_client-txt=openai/gpt-4o-experiment_results_lookup]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "I need to get the results for the 'boiling_point' experiment. First, get the experiment ID for 'boiling_point', then use that ID to get the experiment results. Tell me the boiling point in Celsius."
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_dZwjBxH3aTRhnaS0bJVPqRcz",
"type": "function",
"function": {
"name": "get_experiment_id",
"arguments": "{\"experiment_name\":\"boiling_point\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_dZwjBxH3aTRhnaS0bJVPqRcz",
"content": [
{
"type": "text",
"text": "exp_004"
}
]
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_skNUKbERbtdoADH834U9OE91",
"type": "function",
"function": {
"name": "get_experiment_results",
"arguments": "{\"experiment_id\":\"exp_004\"}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_skNUKbERbtdoADH834U9OE91",
"content": [
{
"type": "text",
"text": "Boiling Point: 100\u00b0C, Status: Verified"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "get_user_id",
"description": "\n Get the user ID for a given username. This ID is needed for other operations.\n\n :param username: The username to look up\n :return: The user ID for the username\n ",
"parameters": {
"properties": {
"username": {
"title": "Username",
"type": "string"
}
},
"required": [
"username"
],
"title": "get_user_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_user_permissions",
"description": "\n Get the permissions for a user ID. Requires a valid user ID from get_user_id.\n\n :param user_id: The user ID to check permissions for\n :return: The permissions for the user\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
}
},
"required": [
"user_id"
],
"title": "get_user_permissionsArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "check_file_access",
"description": "\n Check if a user can access a specific file. Requires a valid user ID.\n\n :param user_id: The user ID to check access for\n :param filename: The filename to check access to\n :return: Whether the user can access the file (yes/no)\n ",
"parameters": {
"properties": {
"user_id": {
"title": "User Id",
"type": "string"
},
"filename": {
"title": "Filename",
"type": "string"
}
},
"required": [
"user_id",
"filename"
],
"title": "check_file_accessArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_id",
"description": "\n Get the experiment ID for a given experiment name. This ID is needed to get results.\n\n :param experiment_name: The name of the experiment\n :return: The experiment ID\n ",
"parameters": {
"properties": {
"experiment_name": {
"title": "Experiment Name",
"type": "string"
}
},
"required": [
"experiment_name"
],
"title": "get_experiment_idArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_experiment_results",
"description": "\n Get the results for an experiment ID. Requires a valid experiment ID from get_experiment_id.\n\n :param experiment_id: The experiment ID to get results for\n :return: The experiment results\n ",
"parameters": {
"properties": {
"experiment_id": {
"title": "Experiment Id",
"type": "string"
}
},
"required": [
"experiment_id"
],
"title": "get_experiment_resultsArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "OzNg5nfMI5VouN"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "EBvjjqFPfytPb"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " boiling",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "HhEiLgKg"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " point",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "hLc2aAgg1D"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " for",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "q3AsmJJ6Rvyt"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " the",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "4QJrcjxcuFLd"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " experiment",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "BQQJ8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " '",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "nj2SOixVU5KocZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "bo",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ookLm9qkLqQQ3M"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "iling",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "J4axWnSRvQU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "_point",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "QG6jvQWF8t"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "'",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "veUGdbLd3d8r2yU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ZOCkbhGksYmsF"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": " ",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "fbNuaYkAA8gREQ7"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "100",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "3rdZxDq7QoXcl"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": "\u00b0C",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "upjHViB9dUBWAd"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "hBZNqRjyLGCIMjg"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PrtgvDwRZp"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-e9f1cc3da429",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 17,
"prompt_tokens": 490,
"total_tokens": 507,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "euYYBnLE4Mj0Z"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@ -0,0 +1,641 @@
{
"test_id": "tests/integration/responses/test_tool_responses.py::test_response_sequential_mcp_tool[openai_client-txt=openai/gpt-4o-boiling_point_tool]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "What is the boiling point of myawesomeliquid in Celsius?"
},
{
"role": "assistant",
"content": "",
"tool_calls": [
{
"index": 0,
"id": "call_b5k2yeqIi5ucElnnrVPyYU4x",
"type": "function",
"function": {
"name": "get_boiling_point",
"arguments": "{\"liquid_name\":\"myawesomeliquid\",\"celsius\":true}"
}
}
]
},
{
"role": "tool",
"tool_call_id": "call_b5k2yeqIi5ucElnnrVPyYU4x",
"content": [
{
"type": "text",
"text": "-100"
}
]
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"name": "greet_everyone",
"parameters": {
"properties": {
"url": {
"title": "Url",
"type": "string"
}
},
"required": [
"url"
],
"title": "greet_everyoneArguments",
"type": "object"
}
}
},
{
"type": "function",
"function": {
"name": "get_boiling_point",
"description": "\n Returns the boiling point of a liquid in Celsius or Fahrenheit.\n\n :param liquid_name: The name of the liquid\n :param celsius: Whether to return the boiling point in Celsius\n :return: The boiling point of the liquid in Celcius or Fahrenheit\n ",
"parameters": {
"properties": {
"liquid_name": {
"title": "Liquid Name",
"type": "string"
},
"celsius": {
"default": true,
"title": "Celsius",
"type": "boolean"
}
},
"required": [
"liquid_name"
],
"title": "get_boiling_pointArguments",
"type": "object"
}
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "WGXCgkwfwMDUCG"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "pkdvw6gGNrtXN"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " boiling",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "RO5YJeZc"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " point",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "riZZHSDEz0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " of",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1zjk8zIdt2Y2b"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " \"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "XGHv0dlif7IrC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "my",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Ii2KeTyV3U0uiU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "aw",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "3OyYvSytdOYhpT"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "esom",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "zCnXbjW4JE6l"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "eli",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "0bwcz2K91q7EO"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "quid",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Um0jFlJegpXI"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "\"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "4OllZlS2JmoD3l"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "x4jApO80AyXpX"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " -",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "wq0D3Wzc1l3h6S"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": "100",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Dn78V58iZ9wKK"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " degrees",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "fjHDBTqT"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": " Celsius",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Cnp6KULL"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "grbygHexDT4JwGx"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "upSRpiQQKE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-ed89b57fec93",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 18,
"prompt_tokens": 195,
"total_tokens": 213,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "psE6Es6zZ2Kz4"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@ -13,8 +13,8 @@ from .streaming_assertions import StreamingValidator
 @pytest.mark.parametrize("case", basic_test_cases)
-def test_response_non_streaming_basic(compat_client, text_model_id, case):
-    response = compat_client.responses.create(
+def test_response_non_streaming_basic(responses_client, text_model_id, case):
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         stream=False,
@ -31,10 +31,10 @@ def test_response_non_streaming_basic(compat_client, text_model_id, case):
         "Total tokens should equal input + output tokens"
     )
-    retrieved_response = compat_client.responses.retrieve(response_id=response.id)
+    retrieved_response = responses_client.responses.retrieve(response_id=response.id)
     assert retrieved_response.output_text == response.output_text
-    next_response = compat_client.responses.create(
+    next_response = responses_client.responses.create(
         model=text_model_id,
         input="Repeat your previous response in all caps.",
         previous_response_id=response.id,
@ -44,8 +44,8 @@ def test_response_non_streaming_basic(compat_client, text_model_id, case):
 @pytest.mark.parametrize("case", basic_test_cases)
-def test_response_streaming_basic(compat_client, text_model_id, case):
-    response = compat_client.responses.create(
+def test_response_streaming_basic(responses_client, text_model_id, case):
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         stream=True,
@ -98,15 +98,15 @@ def test_response_streaming_basic(compat_client, text_model_id, case):
     validator.assert_response_consistency()
     # Verify stored response matches streamed response
-    retrieved_response = compat_client.responses.retrieve(response_id=response_id)
+    retrieved_response = responses_client.responses.retrieve(response_id=response_id)
     final_event = events[-1]
     assert retrieved_response.output_text == final_event.response.output_text
 @pytest.mark.parametrize("case", basic_test_cases)
-def test_response_streaming_incremental_content(compat_client, text_model_id, case):
+def test_response_streaming_incremental_content(responses_client, text_model_id, case):
     """Test that streaming actually delivers content incrementally, not just at the end."""
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         stream=True,
@ -170,10 +170,10 @@ def test_response_streaming_incremental_content(compat_client, text_model_id, ca
 @pytest.mark.parametrize("case", multi_turn_test_cases)
-def test_response_non_streaming_multi_turn(compat_client, text_model_id, case):
+def test_response_non_streaming_multi_turn(responses_client, text_model_id, case):
     previous_response_id = None
     for turn_input, turn_expected in case.turns:
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
             model=text_model_id,
             input=turn_input,
             previous_response_id=previous_response_id,
@ -184,8 +184,8 @@ def test_response_non_streaming_multi_turn(compat_client, text_model_id, case):
 @pytest.mark.parametrize("case", image_test_cases)
-def test_response_non_streaming_image(compat_client, text_model_id, case):
-    response = compat_client.responses.create(
+def test_response_non_streaming_image(responses_client, text_model_id, case):
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         stream=False,
@ -195,10 +195,10 @@ def test_response_non_streaming_image(compat_client, text_model_id, case):
 @pytest.mark.parametrize("case", multi_turn_image_test_cases)
-def test_response_non_streaming_multi_turn_image(compat_client, text_model_id, case):
+def test_response_non_streaming_multi_turn_image(responses_client, text_model_id, case):
     previous_response_id = None
     for turn_input, turn_expected in case.turns:
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
             model=text_model_id,
             input=turn_input,
             previous_response_id=previous_response_id,

View file

@ -131,18 +131,18 @@ class TestConversationResponses:
         assert len(response.output_text.strip()) > 0
     # this is not ready yet
-    # def test_conversation_compat_client(self, compat_client, text_model_id):
+    # def test_conversation_compat_client(self, responses_client, text_model_id):
     #     """Test conversation parameter works with compatibility client."""
-    #     if not hasattr(compat_client, "conversations"):
-    #         pytest.skip("compat_client does not support conversations API")
+    #     if not hasattr(responses_client, "conversations"):
+    #         pytest.skip("responses_client does not support conversations API")
     #
-    #     conversation = compat_client.conversations.create()
-    #     response = compat_client.responses.create(
+    #     conversation = responses_client.conversations.create()
+    #     response = responses_client.responses.create(
     #         model=text_model_id, input="Tell me a joke", conversation=conversation.id
     #     )
     #
     #     assert response is not None
     #     assert len(response.output_text.strip()) > 0
     #
-    #     conversation_items = compat_client.conversations.items.list(conversation.id)
+    #     conversation_items = responses_client.conversations.items.list(conversation.id)
     #     assert len(conversation_items.data) >= 2

View file

@ -9,8 +9,6 @@ import time
 import pytest
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 from .helpers import new_vector_store, upload_file
@ -28,12 +26,9 @@ from .helpers import new_vector_store, upload_file
         },
     ],
 )
-def test_response_text_format(compat_client, text_model_id, text_format):
-    if isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("Responses API text format is not yet supported in library client.")
+def test_response_text_format(responses_client, text_model_id, text_format):
     stream = False
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="What is the capital of France?",
         stream=stream,
@ -47,13 +42,10 @@ def test_response_text_format(compat_client, text_model_id, text_format):
 @pytest.fixture
-def vector_store_with_filtered_files(compat_client, embedding_model_id, embedding_dimension, tmp_path_factory):
+def vector_store_with_filtered_files(responses_client, embedding_model_id, embedding_dimension, tmp_path_factory):
     # """Create a vector store with multiple files that have different attributes for filtering tests."""
-    if isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("upload_file() is not yet supported in library client somehow?")
     vector_store = new_vector_store(
-        compat_client, "test_vector_store_with_filters", embedding_model_id, embedding_dimension
+        responses_client, "test_vector_store_with_filters", embedding_model_id, embedding_dimension
     )
     tmp_path = tmp_path_factory.mktemp("filter_test_files")
@ -104,11 +96,11 @@ def vector_store_with_filtered_files(compat_client, embedding_model_id, embeddin
         file_path.write_text(file_data["content"])
         # Upload file
-        file_response = upload_file(compat_client, file_data["name"], str(file_path))
+        file_response = upload_file(responses_client, file_data["name"], str(file_path))
         file_ids.append(file_response.id)
         # Attach file to vector store with attributes
-        file_attach_response = compat_client.vector_stores.files.create(
+        file_attach_response = responses_client.vector_stores.files.create(
             vector_store_id=vector_store.id,
             file_id=file_response.id,
             attributes=file_data["attributes"],
@ -117,7 +109,7 @@ def vector_store_with_filtered_files(compat_client, embedding_model_id, embeddin
         # Wait for attachment
         while file_attach_response.status == "in_progress":
             time.sleep(0.1)
-            file_attach_response = compat_client.vector_stores.files.retrieve(
+            file_attach_response = responses_client.vector_stores.files.retrieve(
                 vector_store_id=vector_store.id,
                 file_id=file_response.id,
             )
@ -127,17 +119,17 @@ def vector_store_with_filtered_files(compat_client, embedding_model_id, embeddin
     # Cleanup: delete vector store and files
     try:
-        compat_client.vector_stores.delete(vector_store_id=vector_store.id)
+        responses_client.vector_stores.delete(vector_store_id=vector_store.id)
         for file_id in file_ids:
             try:
-                compat_client.files.delete(file_id=file_id)
+                responses_client.files.delete(file_id=file_id)
             except Exception:
                 pass # File might already be deleted
     except Exception:
         pass # Best effort cleanup
-def test_response_file_search_filter_by_region(compat_client, text_model_id, vector_store_with_filtered_files):
+def test_response_file_search_filter_by_region(responses_client, text_model_id, vector_store_with_filtered_files):
     """Test file search with region equality filter."""
     tools = [
         {
@ -147,7 +139,7 @@ def test_response_file_search_filter_by_region(compat_client, text_model_id, vec
         }
     ]
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="What are the updates from the US region?",
         tools=tools,
@ -168,7 +160,7 @@ def test_response_file_search_filter_by_region(compat_client, text_model_id, vec
         assert "asia" not in result.text.lower()
-def test_response_file_search_filter_by_category(compat_client, text_model_id, vector_store_with_filtered_files):
+def test_response_file_search_filter_by_category(responses_client, text_model_id, vector_store_with_filtered_files):
     """Test file search with category equality filter."""
     tools = [
         {
@ -178,7 +170,7 @@ def test_response_file_search_filter_by_category(compat_client, text_model_id, v
         }
     ]
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="Show me all marketing reports",
         tools=tools,
@ -198,7 +190,7 @@ def test_response_file_search_filter_by_category(compat_client, text_model_id, v
         assert "revenue figures" not in result.text.lower()
-def test_response_file_search_filter_by_date_range(compat_client, text_model_id, vector_store_with_filtered_files):
+def test_response_file_search_filter_by_date_range(responses_client, text_model_id, vector_store_with_filtered_files):
     """Test file search with date range filter using compound AND."""
     tools = [
         {
@ -222,7 +214,7 @@ def test_response_file_search_filter_by_date_range(compat_client, text_model_id,
         }
     ]
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="What happened in Q1 2023?",
         tools=tools,
@ -241,7 +233,7 @@ def test_response_file_search_filter_by_date_range(compat_client, text_model_id,
         assert "q3" not in result.text.lower()
-def test_response_file_search_filter_compound_and(compat_client, text_model_id, vector_store_with_filtered_files):
+def test_response_file_search_filter_compound_and(responses_client, text_model_id, vector_store_with_filtered_files):
     """Test file search with compound AND filter (region AND category)."""
     tools = [
         {
@ -257,7 +249,7 @@ def test_response_file_search_filter_compound_and(compat_client, text_model_id,
         }
     ]
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="What are the engineering updates from the US?",
         tools=tools,
@ -277,7 +269,7 @@ def test_response_file_search_filter_compound_and(compat_client, text_model_id,
         assert "promotional" not in result.text.lower() and "revenue" not in result.text.lower()
-def test_response_file_search_filter_compound_or(compat_client, text_model_id, vector_store_with_filtered_files):
+def test_response_file_search_filter_compound_or(responses_client, text_model_id, vector_store_with_filtered_files):
     """Test file search with compound OR filter (marketing OR sales)."""
     tools = [
         {
@ -293,7 +285,7 @@ def test_response_file_search_filter_compound_or(compat_client, text_model_id, v
         }
     ]
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="Show me marketing and sales documents",
         tools=tools,
@ -320,7 +312,7 @@ def test_response_file_search_filter_compound_or(compat_client, text_model_id, v
     assert categories_found.issubset({"marketing", "sales"}), f"Found unexpected categories: {categories_found}"
-def test_response_file_search_streaming_events(compat_client, text_model_id, vector_store_with_filtered_files):
+def test_response_file_search_streaming_events(responses_client, text_model_id, vector_store_with_filtered_files):
     """Test that file search emits proper streaming events (in_progress, searching, completed)."""
     tools = [
         {
@ -329,7 +321,7 @@ def test_response_file_search_streaming_events(compat_client, text_model_id, vec
         }
     ]
-    stream = compat_client.responses.create(
+    stream = responses_client.responses.create(
         model=text_model_id,
         input="What are the marketing updates?",
         tools=tools,

View file

@ -9,6 +9,7 @@ import logging # allow-direct-logging
 import os
 import httpx
+import llama_stack_client
 import openai
 import pytest
@ -29,8 +30,8 @@ from .streaming_assertions import StreamingValidator
 @pytest.mark.parametrize("case", web_search_test_cases)
-def test_response_non_streaming_web_search(compat_client, text_model_id, case):
-    response = compat_client.responses.create(
+def test_response_non_streaming_web_search(responses_client, text_model_id, case):
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         tools=case.tools,
@ -48,12 +49,9 @@ def test_response_non_streaming_web_search(compat_client, text_model_id, case):
 @pytest.mark.parametrize("case", file_search_test_cases)
 def test_response_non_streaming_file_search(
-    compat_client, text_model_id, embedding_model_id, embedding_dimension, tmp_path, case
+    responses_client, text_model_id, embedding_model_id, embedding_dimension, tmp_path, case
 ):
-    if isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("Responses API file search is not yet supported in library client.")
-    vector_store = new_vector_store(compat_client, "test_vector_store", embedding_model_id, embedding_dimension)
+    vector_store = new_vector_store(responses_client, "test_vector_store", embedding_model_id, embedding_dimension)
     if case.file_content:
         file_name = "test_response_non_streaming_file_search.txt"
@ -65,16 +63,16 @@ def test_response_non_streaming_file_search(
     else:
         raise ValueError("No file content or path provided for case")
-    file_response = upload_file(compat_client, file_name, file_path)
+    file_response = upload_file(responses_client, file_name, file_path)
     # Attach our file to the vector store
-    compat_client.vector_stores.files.create(
+    responses_client.vector_stores.files.create(
         vector_store_id=vector_store.id,
         file_id=file_response.id,
     )
     # Wait for the file to be attached
-    wait_for_file_attachment(compat_client, vector_store.id, file_response.id)
+    wait_for_file_attachment(responses_client, vector_store.id, file_response.id)
     # Update our tools with the right vector store id
     tools = case.tools
@ -83,7 +81,7 @@ def test_response_non_streaming_file_search(
             tool["vector_store_ids"] = [vector_store.id]
     # Create the response request, which should query our vector store
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         tools=tools,
@ -105,15 +103,12 @@ def test_response_non_streaming_file_search(
 def test_response_non_streaming_file_search_empty_vector_store(
-    compat_client, text_model_id, embedding_model_id, embedding_dimension
+    responses_client, text_model_id, embedding_model_id, embedding_dimension
 ):
-    if isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("Responses API file search is not yet supported in library client.")
-    vector_store = new_vector_store(compat_client, "test_vector_store", embedding_model_id, embedding_dimension)
+    vector_store = new_vector_store(responses_client, "test_vector_store", embedding_model_id, embedding_dimension)
     # Create the response request, which should query our vector store
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="How many experts does the Llama 4 Maverick model have?",
         tools=[{"type": "file_search", "vector_store_ids": [vector_store.id]}],
@ -133,13 +128,10 @@ def test_response_non_streaming_file_search_empty_vector_store(
 def test_response_sequential_file_search(
-    compat_client, text_model_id, embedding_model_id, embedding_dimension, tmp_path
+    responses_client, text_model_id, embedding_model_id, embedding_dimension, tmp_path
 ):
     """Test file search with sequential responses using previous_response_id."""
-    if isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("Responses API file search is not yet supported in library client.")
-    vector_store = new_vector_store(compat_client, "test_vector_store", embedding_model_id, embedding_dimension)
+    vector_store = new_vector_store(responses_client, "test_vector_store", embedding_model_id, embedding_dimension)
     # Create a test file with content
     file_content = "The Llama 4 Maverick model has 128 experts in its mixture of experts architecture."
@ -147,21 +139,21 @@ def test_response_sequential_file_search(
     file_path = tmp_path / file_name
     file_path.write_text(file_content)
-    file_response = upload_file(compat_client, file_name, file_path)
+    file_response = upload_file(responses_client, file_name, file_path)
     # Attach the file to the vector store
-    compat_client.vector_stores.files.create(
+    responses_client.vector_stores.files.create(
         vector_store_id=vector_store.id,
         file_id=file_response.id,
     )
     # Wait for the file to be attached
-    wait_for_file_attachment(compat_client, vector_store.id, file_response.id)
+    wait_for_file_attachment(responses_client, vector_store.id, file_response.id)
     tools = [{"type": "file_search", "vector_store_ids": [vector_store.id]}]
     # First response request with file search
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input="How many experts does the Llama 4 Maverick model have?",
         tools=tools,
@ -178,7 +170,7 @@ def test_response_sequential_file_search(
     assert "128" in response.output_text or "experts" in response.output_text.lower()
     # Second response request using previous_response_id
-    response2 = compat_client.responses.create(
+    response2 = responses_client.responses.create(
         model=text_model_id,
         input="Can you tell me more about the architecture?",
         tools=tools,
@ -199,14 +191,11 @@ def test_response_sequential_file_search(
 @pytest.mark.parametrize("case", mcp_tool_test_cases)
-def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case, caplog):
-    if not isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("in-process MCP server is only supported in library client")
+def test_response_non_streaming_mcp_tool(responses_client, text_model_id, case, caplog):
     with make_mcp_server() as mcp_server_info:
         tools = setup_mcp_tools(case.tools, mcp_server_info)
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
            model=text_model_id,
            input=case.input,
            tools=tools,
@ -243,15 +232,15 @@ def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case, cap
         exc_type = (
             AuthenticationRequiredError
-            if isinstance(compat_client, LlamaStackAsLibraryClient)
-            else (httpx.HTTPStatusError, openai.AuthenticationError)
+            if isinstance(responses_client, LlamaStackAsLibraryClient)
+            else (httpx.HTTPStatusError, openai.AuthenticationError, llama_stack_client.AuthenticationError)
         )
         # Suppress expected auth error logs only for the failing auth attempt
         with caplog.at_level(
             logging.CRITICAL, logger="llama_stack.providers.inline.agents.meta_reference.responses.streaming"
         ):
             with pytest.raises(exc_type):
-                compat_client.responses.create(
+                responses_client.responses.create(
                     model=text_model_id,
                     input=case.input,
                     tools=tools,
@ -262,7 +251,7 @@ def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case, cap
             if tool["type"] == "mcp":
                 tool["headers"] = {"Authorization": "Bearer test-token"}
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
             model=text_model_id,
             input=case.input,
             tools=tools,
@ -272,14 +261,11 @@ def test_response_non_streaming_mcp_tool(compat_client, text_model_id, case, cap
 @pytest.mark.parametrize("case", mcp_tool_test_cases)
-def test_response_sequential_mcp_tool(compat_client, text_model_id, case):
-    if not isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("in-process MCP server is only supported in library client")
+def test_response_sequential_mcp_tool(responses_client, text_model_id, case):
     with make_mcp_server() as mcp_server_info:
         tools = setup_mcp_tools(case.tools, mcp_server_info)
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
            model=text_model_id,
            input=case.input,
            tools=tools,
@ -311,7 +297,7 @@ def test_response_sequential_mcp_tool(compat_client, text_model_id, case):
         text_content = message.content[0].text
         assert "boiling point" in text_content.lower()
-        response2 = compat_client.responses.create(
+        response2 = responses_client.responses.create(
             model=text_model_id, input=case.input, tools=tools, stream=False, previous_response_id=response.id
         )
@ -323,16 +309,13 @@ def test_response_sequential_mcp_tool(compat_client, text_model_id, case):
 @pytest.mark.parametrize("case", mcp_tool_test_cases)
 @pytest.mark.parametrize("approve", [True, False])
-def test_response_mcp_tool_approval(compat_client, text_model_id, case, approve):
-    if not isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("in-process MCP server is only supported in library client")
+def test_response_mcp_tool_approval(responses_client, text_model_id, case, approve):
     with make_mcp_server() as mcp_server_info:
         tools = setup_mcp_tools(case.tools, mcp_server_info)
         for tool in tools:
             tool["require_approval"] = "always"
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
            model=text_model_id,
            input=case.input,
            tools=tools,
@ -352,13 +335,13 @@ def test_response_mcp_tool_approval(compat_client, text_model_id, case, approve)
         approval_request = response.output[1]
         assert approval_request.type == "mcp_approval_request"
         assert approval_request.name == "get_boiling_point"
-        assert json.loads(approval_request.arguments) == {
-            "liquid_name": "myawesomeliquid",
-            "celsius": True,
-        }
+        args = json.loads(approval_request.arguments)
+        assert args["liquid_name"] == "myawesomeliquid"
+        # celsius has a default value of True, so it may be omitted or explicitly set
+        assert args.get("celsius", True) is True
         # send approval response
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
             previous_response_id=response.id,
             model=text_model_id,
             input=[{"type": "mcp_approval_response", "approval_request_id": approval_request.id, "approve": approve}],
@ -398,8 +381,8 @@ def test_response_mcp_tool_approval(compat_client, text_model_id, case, approve)
 @pytest.mark.parametrize("case", custom_tool_test_cases)
-def test_response_non_streaming_custom_tool(compat_client, text_model_id, case):
-    response = compat_client.responses.create(
+def test_response_non_streaming_custom_tool(responses_client, text_model_id, case):
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         tools=case.tools,
@ -412,8 +395,8 @@ def test_response_non_streaming_custom_tool(compat_client, text_model_id, case):
 @pytest.mark.parametrize("case", custom_tool_test_cases)
-def test_response_function_call_ordering_1(compat_client, text_model_id, case):
-    response = compat_client.responses.create(
+def test_response_function_call_ordering_1(responses_client, text_model_id, case):
+    response = responses_client.responses.create(
         model=text_model_id,
         input=case.input,
         tools=case.tools,
@ -437,13 +420,13 @@ def test_response_function_call_ordering_1(compat_client, text_model_id, case):
             "call_id": response.output[0].call_id,
         }
     )
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id, input=inputs, tools=case.tools, stream=False, previous_response_id=response.id
     )
     assert len(response.output) == 1
-def test_response_function_call_ordering_2(compat_client, text_model_id):
+def test_response_function_call_ordering_2(responses_client, text_model_id):
     tools = [
         {
             "type": "function",
@ -468,7 +451,7 @@ def test_response_function_call_ordering_2(compat_client, text_model_id):
             "content": "Is the weather better in San Francisco or Los Angeles?",
         }
     ]
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input=inputs,
         tools=tools,
@ -489,7 +472,7 @@ def test_response_function_call_ordering_2(compat_client, text_model_id):
                "call_id": output.call_id,
            }
        )
-    response = compat_client.responses.create(
+    response = responses_client.responses.create(
         model=text_model_id,
         input=inputs,
         tools=tools,
@ -500,15 +483,12 @@ def test_response_function_call_ordering_2(compat_client, text_model_id):
 @pytest.mark.parametrize("case", multi_turn_tool_execution_test_cases)
-def test_response_non_streaming_multi_turn_tool_execution(compat_client, text_model_id, case):
+def test_response_non_streaming_multi_turn_tool_execution(responses_client, text_model_id, case):
     """Test multi-turn tool execution where multiple MCP tool calls are performed in sequence."""
-    if not isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("in-process MCP server is only supported in library client")
     with make_mcp_server(tools=dependency_tools()) as mcp_server_info:
         tools = setup_mcp_tools(case.tools, mcp_server_info)
-        response = compat_client.responses.create(
+        response = responses_client.responses.create(
            input=case.input,
            model=text_model_id,
            tools=tools,
@ -550,15 +530,12 @@ def test_response_non_streaming_multi_turn_tool_execution(compat_client, text_mo
 @pytest.mark.parametrize("case", multi_turn_tool_execution_streaming_test_cases)
-def test_response_streaming_multi_turn_tool_execution(compat_client, text_model_id, case):
+def test_response_streaming_multi_turn_tool_execution(responses_client, text_model_id, case):
     """Test streaming multi-turn tool execution where multiple MCP tool calls are performed in sequence."""
-    if not isinstance(compat_client, LlamaStackAsLibraryClient):
-        pytest.skip("in-process MCP server is only supported in library client")
    with make_mcp_server(tools=dependency_tools()) as mcp_server_info:
        tools = setup_mcp_tools(case.tools, mcp_server_info)
-        stream = compat_client.responses.create(
+        stream = responses_client.responses.create(
            input=case.input,
            model=text_model_id,
            tools=tools,

View file

@ -10,8 +10,6 @@ import pytest
 from llama_stack_client.lib.agents.agent import Agent
 from llama_stack_client.lib.agents.turn_events import StepCompleted, StepProgress, ToolCallIssuedDelta
-from llama_stack.core.library_client import LlamaStackAsLibraryClient
 AUTH_TOKEN = "test-token"
 from tests.common.mcp import MCP_TOOLGROUP_ID, make_mcp_server
@ -24,9 +22,6 @@ def mcp_server():
 def test_mcp_invocation(llama_stack_client, text_model_id, mcp_server):
-    if not isinstance(llama_stack_client, LlamaStackAsLibraryClient):
-        pytest.skip("The local MCP server only reliably reachable from library client.")
     test_toolgroup_id = MCP_TOOLGROUP_ID
     uri = mcp_server["server_url"]