Merge branch 'main' into responses-and-safety

slekkala1 authored 2025-10-10 13:57:47 -07:00, committed by GitHub
commit 90ee3001d9
33 changed files with 16970 additions and 713 deletions

View file

@@ -92,7 +92,8 @@ jobs:
run: |
echo "Waiting for Llama Stack server..."
for i in {1..30}; do
if curl -s -L -H "Authorization: Bearer $(cat llama-stack-auth-token)" http://localhost:8321/v1/health | grep -q "OK"; then
# Note: /v1/health does not require authentication
if curl -s -L http://localhost:8321/v1/health | grep -q "OK"; then
echo "Llama Stack server is up!"
if grep -q "Enabling authentication with provider: ${{ matrix.auth-provider }}" server.log; then
echo "Llama Stack server is configured to use ${{ matrix.auth-provider }} auth"
@@ -111,4 +112,27 @@ jobs:
- name: Test auth
run: |
curl -s -L -H "Authorization: Bearer $(cat llama-stack-auth-token)" http://127.0.0.1:8321/v1/providers|jq
echo "Testing /v1/version without token (should succeed)..."
if curl -s -L -o /dev/null -w "%{http_code}" http://127.0.0.1:8321/v1/version | grep -q "200"; then
echo "/v1/version accessible without token (200)"
else
echo "/v1/version returned non-200 status without token"
exit 1
fi
echo "Testing /v1/providers without token (should fail with 401)..."
if curl -s -L -o /dev/null -w "%{http_code}" http://127.0.0.1:8321/v1/providers | grep -q "401"; then
echo "/v1/providers blocked without token (401)"
else
echo "/v1/providers did not return 401 without token"
exit 1
fi
echo "Testing /v1/providers with valid token (should succeed)..."
curl -s -L -H "Authorization: Bearer $(cat llama-stack-auth-token)" http://127.0.0.1:8321/v1/providers | jq
if [ $? -eq 0 ]; then
echo "/v1/providers accessible with valid token"
else
echo "/v1/providers failed with valid token"
exit 1
fi

View file

@@ -73,7 +73,7 @@ class Inspect(Protocol):
"""
...
@webmethod(route="/health", method="GET", level=LLAMA_STACK_API_V1)
@webmethod(route="/health", method="GET", level=LLAMA_STACK_API_V1, require_authentication=False)
async def health(self) -> HealthInfo:
"""Get health status.
@@ -83,7 +83,7 @@ class Inspect(Protocol):
"""
...
@webmethod(route="/version", method="GET", level=LLAMA_STACK_API_V1)
@webmethod(route="/version", method="GET", level=LLAMA_STACK_API_V1, require_authentication=False)
async def version(self) -> VersionInfo:
"""Get version.

View file

@@ -27,6 +27,11 @@ class AuthenticationMiddleware:
3. Extracts user attributes from the provider's response
4. Makes these attributes available to the route handlers for access control
Unauthenticated Access:
Endpoints can opt out of authentication by setting require_authentication=False
in their @webmethod decorator. This is typically used for operational endpoints
like /health and /version to support monitoring, load balancers, and observability tools.
The middleware supports multiple authentication providers through the AuthProvider interface:
- Kubernetes: Validates tokens against the Kubernetes API server
- Custom: Validates tokens against a custom endpoint
@@ -88,7 +93,26 @@ class AuthenticationMiddleware:
async def __call__(self, scope, receive, send):
if scope["type"] == "http":
# First, handle authentication
# Find the route and check if authentication is required
path = scope.get("path", "")
method = scope.get("method", hdrs.METH_GET)
if not hasattr(self, "route_impls"):
self.route_impls = initialize_route_impls(self.impls)
webmethod = None
try:
_, _, _, webmethod = find_matching_route(method, path, self.route_impls)
except ValueError:
# If no matching endpoint is found, continue and run auth anyway
pass
# If webmethod explicitly sets require_authentication=False, allow without auth
if webmethod and webmethod.require_authentication is False:
logger.debug(f"Allowing unauthenticated access to endpoint: {path}")
return await self.app(scope, receive, send)
# Handle authentication
headers = dict(scope.get("headers", []))
auth_header = headers.get(b"authorization", b"").decode()
@@ -127,19 +151,7 @@ class AuthenticationMiddleware:
)
# Scope-based API access control
path = scope.get("path", "")
method = scope.get("method", hdrs.METH_GET)
if not hasattr(self, "route_impls"):
self.route_impls = initialize_route_impls(self.impls)
try:
_, _, _, webmethod = find_matching_route(method, path, self.route_impls)
except ValueError:
# If no matching endpoint is found, pass through to FastAPI
return await self.app(scope, receive, send)
if webmethod.required_scope:
if webmethod and webmethod.required_scope:
user = user_from_scope(scope)
if not _has_required_scope(webmethod.required_scope, user):
return await self._send_auth_error(

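Taken together, the hunks above move the route lookup ahead of token validation: if the matched webmethod sets require_authentication=False the request bypasses auth entirely, while unmatched routes still fall through to the auth check. A minimal, self-contained sketch of that decision rule (FakeWebMethod, ROUTES, and needs_auth are illustrative stand-ins, not llama_stack code):

from dataclasses import dataclass

@dataclass
class FakeWebMethod:
    route: str
    require_authentication: bool = True  # mirrors the new WebMethod default

ROUTES = {
    ("GET", "/v1/health"): FakeWebMethod("/v1/health", require_authentication=False),
    ("GET", "/v1/version"): FakeWebMethod("/v1/version", require_authentication=False),
    ("GET", "/v1/providers"): FakeWebMethod("/v1/providers"),  # auth still required
}

def needs_auth(method: str, path: str) -> bool:
    webmethod = ROUTES.get((method, path))
    if webmethod is None:
        return True  # unknown route: auth still runs, matching the middleware
    return webmethod.require_authentication is not False

assert needs_auth("GET", "/v1/health") is False
assert needs_auth("GET", "/v1/providers") is True
assert needs_auth("GET", "/v1/does-not-exist") is True
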
View file

@@ -41,12 +41,16 @@ from llama_stack.apis.agents.openai_responses import (
OpenAIResponseOutputMessageFunctionToolCall,
OpenAIResponseOutputMessageMCPListTools,
OpenAIResponseText,
OpenAIResponseUsage,
OpenAIResponseUsageInputTokensDetails,
OpenAIResponseUsageOutputTokensDetails,
WebSearchToolTypes,
)
from llama_stack.apis.inference import (
Inference,
OpenAIAssistantMessageParam,
OpenAIChatCompletion,
OpenAIChatCompletionChunk,
OpenAIChatCompletionToolCall,
OpenAIChoice,
OpenAIMessageParam,
@@ -116,43 +120,8 @@ class StreamingResponseOrchestrator:
self.final_messages: list[OpenAIMessageParam] = []
# mapping for annotations
self.citation_files: dict[str, str] = {}
# Track accumulated text for shield validation
self.accumulated_text = ""
# Track if we've sent a refusal response
self.violation_detected = False
async def _check_output_stream_safety(self, text_delta: str) -> str | None:
"""Check streaming text content against shields. Returns violation message if blocked."""
if not self.shield_ids:
return None
self.accumulated_text += text_delta
# Check accumulated text periodically for violations (every 50 characters or at word boundaries)
if len(self.accumulated_text) > 50 or text_delta.endswith((" ", "\n", ".", "!", "?")):
temp_messages = [{"role": "assistant", "content": self.accumulated_text}]
messages = convert_openai_to_inference_messages(temp_messages)
try:
await run_multiple_shields(self.safety_api, messages, self.shield_ids)
except SafetyException as e:
logger.info(f"Output shield violation: {e.violation.user_message}")
return e.violation.user_message or "Generated content blocked by safety shields"
async def _create_refusal_response(self, violation_message: str) -> OpenAIResponseObjectStream:
"""Create a refusal response to replace streaming content."""
refusal_content = OpenAIResponseContentPartRefusal(refusal=violation_message)
# Create a completed refusal response
refusal_response = OpenAIResponseObject(
id=self.response_id,
created_at=self.created_at,
model=self.ctx.model,
status="completed",
output=[OpenAIResponseMessage(role="assistant", content=[refusal_content], type="message")],
)
return OpenAIResponseObjectStreamResponseCompleted(response=refusal_response)
# Track accumulated usage across all inference calls
self.accumulated_usage: OpenAIResponseUsage | None = None
def _clone_outputs(self, outputs: list[OpenAIResponseOutput]) -> list[OpenAIResponseOutput]:
cloned: list[OpenAIResponseOutput] = []
@@ -180,6 +149,7 @@ class StreamingResponseOrchestrator:
text=self.text,
tools=self.ctx.available_tools(),
error=error,
usage=self.accumulated_usage,
)
async def create_response(self) -> AsyncIterator[OpenAIResponseObjectStream]:
@@ -217,6 +187,9 @@ class StreamingResponseOrchestrator:
stream=True,
temperature=self.ctx.temperature,
response_format=response_format,
stream_options={
"include_usage": True,
},
)
# Process streaming chunks and build complete response
@@ -352,6 +325,51 @@ class StreamingResponseOrchestrator:
return function_tool_calls, non_function_tool_calls, approvals, next_turn_messages
def _accumulate_chunk_usage(self, chunk: OpenAIChatCompletionChunk) -> None:
"""Accumulate usage from a streaming chunk into the response usage format."""
if not chunk.usage:
return
if self.accumulated_usage is None:
# Convert from chat completion format to response format
self.accumulated_usage = OpenAIResponseUsage(
input_tokens=chunk.usage.prompt_tokens,
output_tokens=chunk.usage.completion_tokens,
total_tokens=chunk.usage.total_tokens,
input_tokens_details=(
OpenAIResponseUsageInputTokensDetails(cached_tokens=chunk.usage.prompt_tokens_details.cached_tokens)
if chunk.usage.prompt_tokens_details
else None
),
output_tokens_details=(
OpenAIResponseUsageOutputTokensDetails(
reasoning_tokens=chunk.usage.completion_tokens_details.reasoning_tokens
)
if chunk.usage.completion_tokens_details
else None
),
)
else:
# Accumulate across multiple inference calls
self.accumulated_usage = OpenAIResponseUsage(
input_tokens=self.accumulated_usage.input_tokens + chunk.usage.prompt_tokens,
output_tokens=self.accumulated_usage.output_tokens + chunk.usage.completion_tokens,
total_tokens=self.accumulated_usage.total_tokens + chunk.usage.total_tokens,
# Use latest non-null details
input_tokens_details=(
OpenAIResponseUsageInputTokensDetails(cached_tokens=chunk.usage.prompt_tokens_details.cached_tokens)
if chunk.usage.prompt_tokens_details
else self.accumulated_usage.input_tokens_details
),
output_tokens_details=(
OpenAIResponseUsageOutputTokensDetails(
reasoning_tokens=chunk.usage.completion_tokens_details.reasoning_tokens
)
if chunk.usage.completion_tokens_details
else self.accumulated_usage.output_tokens_details
),
)
async def _process_streaming_chunks(
self, completion_result, output_messages: list[OpenAIResponseOutput]
) -> AsyncIterator[OpenAIResponseObjectStream | ChatCompletionResult]:
@@ -377,6 +395,10 @@ class StreamingResponseOrchestrator:
chat_response_id = chunk.id
chunk_created = chunk.created
chunk_model = chunk.model
# Accumulate usage from chunks (typically in final chunk with stream_options)
self._accumulate_chunk_usage(chunk)
for chunk_choice in chunk.choices:
# Emit incremental text content as delta events
if chunk_choice.delta.content:

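The stream_options={"include_usage": True} addition is what feeds _accumulate_chunk_usage: OpenAI-compatible servers then send one final chunk with an empty choices list and a populated usage object, as the recordings further down show. A small client-side sketch of consuming such a stream, assuming the openai Python SDK against an OpenAI-compatible endpoint (base_url, api_key, and model are placeholders):

from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1", api_key="placeholder")

stream = client.chat.completions.create(
    model="llama3.2:3b-instruct-fp16",
    messages=[{"role": "user", "content": "Which planet do humans live on?"}],
    stream=True,
    stream_options={"include_usage": True},
)

text_parts: list[str] = []
usage = None
for chunk in stream:
    for choice in chunk.choices:
        if choice.delta.content:
            text_parts.append(choice.delta.content)
    if chunk.usage is not None:  # final chunk: empty choices, populated usage
        usage = chunk.usage

print("".join(text_parts))
if usage is not None:
    print(usage.prompt_tokens, usage.completion_tokens, usage.total_tokens)
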
View file

@@ -178,9 +178,9 @@ class ReferenceBatchesImpl(Batches):
# TODO: set expiration time for garbage collection
if endpoint not in ["/v1/chat/completions", "/v1/completions"]:
if endpoint not in ["/v1/chat/completions", "/v1/completions", "/v1/embeddings"]:
raise ValueError(
f"Invalid endpoint: {endpoint}. Supported values: /v1/chat/completions, /v1/completions. Code: invalid_value. Param: endpoint",
f"Invalid endpoint: {endpoint}. Supported values: /v1/chat/completions, /v1/completions, /v1/embeddings. Code: invalid_value. Param: endpoint",
)
if completion_window != "24h":
@@ -425,18 +425,23 @@ class ReferenceBatchesImpl(Batches):
valid = False
if batch.endpoint == "/v1/chat/completions":
required_params = [
required_params: list[tuple[str, Any, str]] = [
("model", str, "a string"),
# messages is specific to /v1/chat/completions
# we could skip validating messages here and let inference fail. however,
# that would be a very expensive way to find out that messages is wrong.
("messages", list, "an array"), # TODO: allow messages to be a string?
]
else: # /v1/completions
elif batch.endpoint == "/v1/completions":
required_params = [
("model", str, "a string"),
("prompt", str, "a string"), # TODO: allow prompt to be a list of strings??
]
else: # /v1/embeddings
required_params = [
("model", str, "a string"),
("input", (str, list), "a string or array of strings"),
]
for param, expected_type, type_string in required_params:
if param not in body:
@@ -614,7 +619,7 @@ class ReferenceBatchesImpl(Batches):
"body": chat_response.model_dump_json(),
},
}
else: # /v1/completions
elif request.url == "/v1/completions":
completion_response = await self.inference_api.openai_completion(**request.body)
# this is for mypy, we don't allow streaming so we'll get the right type
@@ -630,6 +635,20 @@ class ReferenceBatchesImpl(Batches):
"body": completion_response.model_dump_json(),
},
}
else: # /v1/embeddings
embeddings_response = await self.inference_api.openai_embeddings(**request.body)
assert hasattr(embeddings_response, "model_dump_json"), (
"Embeddings response must have model_dump_json method"
)
return {
"id": request_id,
"custom_id": request.custom_id,
"response": {
"status_code": 200,
"request_id": request_id, # TODO: should this be different?
"body": embeddings_response.model_dump_json(),
},
}
except Exception as e:
logger.info(f"Error processing request {request.custom_id} in batch {batch_id}: {e}")
return {

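For reference, a batch input for the newly supported /v1/embeddings endpoint is plain JSONL whose body matches the validation added above: "model" must be a string and "input" either a string or an array of strings. A short sketch that writes such a file (custom IDs, the model name, and the output path are illustrative):

import json

batch_lines = [
    {
        "custom_id": "embed-1",
        "method": "POST",
        "url": "/v1/embeddings",
        "body": {"model": "all-minilm:l6-v2", "input": "Hello world", "encoding_format": "float"},
    },
    {
        "custom_id": "embed-2",
        "method": "POST",
        "url": "/v1/embeddings",
        "body": {
            "model": "all-minilm:l6-v2",
            "input": ["How are you?", "Good morning"],
            "encoding_format": "float",
        },
    },
]

with open("embeddings_batch.jsonl", "w") as f:
    for line in batch_lines:
        f.write(json.dumps(line) + "\n")
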
View file

@@ -61,6 +61,7 @@ class WebMethod:
descriptive_name: str | None = None
required_scope: str | None = None
deprecated: bool | None = False
require_authentication: bool | None = True
CallableT = TypeVar("CallableT", bound=Callable[..., Any])
@@ -77,6 +78,7 @@ def webmethod(
descriptive_name: str | None = None,
required_scope: str | None = None,
deprecated: bool | None = False,
require_authentication: bool | None = True,
) -> Callable[[CallableT], CallableT]:
"""
Decorator that supplies additional metadata to an endpoint operation function.
@@ -86,6 +88,7 @@ def webmethod(
:param request_examples: Sample requests that the operation might take. Pass a list of objects, not JSON.
:param response_examples: Sample responses that the operation might produce. Pass a list of objects, not JSON.
:param required_scope: Required scope for this endpoint (e.g., 'monitoring.viewer').
:param require_authentication: Whether this endpoint requires authentication (default True).
"""
def wrap(func: CallableT) -> CallableT:
@@ -100,6 +103,7 @@ def webmethod(
descriptive_name=descriptive_name,
required_scope=required_scope,
deprecated=deprecated,
require_authentication=require_authentication if require_authentication is not None else True,
)
# Store all webmethods in a list to support multiple decorators

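The default keeps existing behavior: leaving the flag off yields require_authentication=True, and only an explicit False (as on /health and /version in the Inspect protocol above) opts an endpoint out. A toy re-creation of the pattern, deliberately independent of the llama_stack imports, just to show how the flag rides along as route metadata:

from dataclasses import dataclass
from typing import Any, Callable

@dataclass
class WebMethod:  # toy stand-in, not the llama_stack class
    route: str
    method: str = "GET"
    require_authentication: bool | None = True

def webmethod(route: str, method: str = "GET", require_authentication: bool | None = True):
    def wrap(func: Callable[..., Any]) -> Callable[..., Any]:
        func.__webmethod__ = WebMethod(
            route=route,
            method=method,
            require_authentication=require_authentication if require_authentication is not None else True,
        )
        return func
    return wrap

@webmethod(route="/health", require_authentication=False)
async def health() -> dict:
    return {"status": "OK"}

@webmethod(route="/providers")  # default: auth required
async def providers() -> dict:
    return {}

assert health.__webmethod__.require_authentication is False
assert providers.__webmethod__.require_authentication is True
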
View file

@@ -0,0 +1,130 @@
{
"test_id": "tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=ollama/llama3.2:3b-instruct-fp16-tools1-True]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "llama3.2:3b-instruct-fp16",
"messages": [
{
"role": "user",
"content": "What's the weather in Tokyo? YOU MUST USE THE get_weather function to get the weather."
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"type": "function",
"name": "get_weather",
"description": "Get the weather in a given city",
"parameters": {
"type": "object",
"properties": {
"city": {
"type": "string",
"description": "The city to get the weather for"
}
}
},
"strict": null
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "llama3.2:3b-instruct-fp16"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2172059863d4",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_os3xa9go",
"function": {
"arguments": "{\"city\":\"Tokyo\"}",
"name": "get_weather"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2172059863d4",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-2172059863d4",
"choices": [],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
"completion_tokens": 15,
"prompt_tokens": 179,
"total_tokens": 194,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,272 @@
{
"test_id": "tests/integration/agents/test_openai_responses.py::test_list_response_input_items_with_limit_and_order[txt=ollama/llama3.2:3b-instruct-fp16]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "llama3.2:3b-instruct-fp16",
"messages": [
{
"role": "user",
"content": "Message A: What is the capital of France?"
},
{
"role": "assistant",
"content": "The capital of France is Paris."
},
{
"role": "user",
"content": "Message B: What about Spain?"
},
{
"role": "assistant",
"content": "The capital of Spain is Madrid."
},
{
"role": "user",
"content": "Message C: And Italy?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "llama3.2:3b-instruct-fp16"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": " capital",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": " of",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": " Italy",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": " Rome",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-4da32cdf48ae",
"choices": [],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
"completion_tokens": 8,
"prompt_tokens": 82,
"total_tokens": 90,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,122 @@
{
"test_id": "tests/integration/agents/test_openai_responses.py::test_function_call_output_response_with_none_arguments[txt=ollama/llama3.2:3b-instruct-fp16]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "llama3.2:3b-instruct-fp16",
"messages": [
{
"role": "user",
"content": "what's the current time? You MUST call the `get_current_time` function to find out."
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"type": "function",
"name": "get_current_time",
"description": "Get the current time",
"parameters": {},
"strict": null
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "llama3.2:3b-instruct-fp16"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-585a2cf2c22b",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_lqrdy0rt",
"function": {
"arguments": "{}",
"name": "get_current_time"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-585a2cf2c22b",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-585a2cf2c22b",
"choices": [],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
"completion_tokens": 14,
"prompt_tokens": 161,
"total_tokens": 175,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,256 @@
{
"test_id": "tests/integration/agents/test_openai_responses.py::test_list_response_input_items[openai_client-txt=ollama/llama3.2:3b-instruct-fp16]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "llama3.2:3b-instruct-fp16",
"messages": [
{
"role": "user",
"content": "What is the capital of France?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "llama3.2:3b-instruct-fp16"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": " capital",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": " of",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": " France",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": " Paris",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5e4ea6b89ff4",
"choices": [],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
"completion_tokens": 8,
"prompt_tokens": 32,
"total_tokens": 40,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,130 @@
{
"test_id": "tests/integration/agents/test_openai_responses.py::test_responses_store[openai_client-txt=ollama/llama3.2:3b-instruct-fp16-tools1-False]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "llama3.2:3b-instruct-fp16",
"messages": [
{
"role": "user",
"content": "What's the weather in Tokyo? YOU MUST USE THE get_weather function to get the weather."
}
],
"stream": true,
"stream_options": {
"include_usage": true
},
"tools": [
{
"type": "function",
"function": {
"type": "function",
"name": "get_weather",
"description": "Get the weather in a given city",
"parameters": {
"type": "object",
"properties": {
"city": {
"type": "string",
"description": "The city to get the weather for"
}
}
},
"strict": null
}
}
]
},
"endpoint": "/v1/chat/completions",
"model": "llama3.2:3b-instruct-fp16"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5edf2f0b7a9c",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": [
{
"index": 0,
"id": "call_4ibtjudr",
"function": {
"arguments": "{\"city\":\"Tokyo\"}",
"name": "get_weather"
},
"type": "function"
}
]
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5edf2f0b7a9c",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": "tool_calls",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5edf2f0b7a9c",
"choices": [],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
"completion_tokens": 15,
"prompt_tokens": 179,
"total_tokens": 194,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,256 @@
{
"test_id": "tests/integration/agents/test_openai_responses.py::test_list_response_input_items[client_with_models-txt=ollama/llama3.2:3b-instruct-fp16]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "llama3.2:3b-instruct-fp16",
"messages": [
{
"role": "user",
"content": "What is the capital of France?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "llama3.2:3b-instruct-fp16"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": " capital",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": " of",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": " France",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": " Paris",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": null
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-f4894c38303b",
"choices": [],
"created": 0,
"model": "llama3.2:3b-instruct-fp16",
"object": "chat.completion.chunk",
"service_tier": null,
"system_fingerprint": "fp_ollama",
"usage": {
"completion_tokens": 8,
"prompt_tokens": 32,
"total_tokens": 40,
"completion_tokens_details": null,
"prompt_tokens_details": null
}
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,422 @@
{
"test_id": "tests/integration/batches/test_batches.py::TestBatchesIntegration::test_batch_e2e_embeddings[emb=ollama/all-minilm:l6-v2]",
"request": {
"method": "POST",
"url": "http://0.0.0.0:11434/v1/v1/embeddings",
"headers": {},
"body": {
"model": "all-minilm:l6-v2",
"input": "Hello world",
"encoding_format": "float"
},
"endpoint": "/v1/embeddings",
"model": "all-minilm:l6-v2"
},
"response": {
"body": {
"__type__": "openai.types.create_embedding_response.CreateEmbeddingResponse",
"__data__": {
"data": [
{
"embedding": [
-0.034477483,
0.030899182,
0.0066526434,
0.026075281,
-0.039411988,
-0.16037956,
0.06692074,
-0.006511468,
-0.047467157,
0.014774274,
0.07094562,
0.055527706,
0.019183245,
-0.026297163,
-0.010018651,
-0.02694715,
0.0223884,
-0.02220693,
-0.14977267,
-0.017530814,
0.0075938613,
0.054253556,
0.0032258728,
0.031724673,
-0.08466085,
-0.029342307,
0.05155048,
0.048105717,
-0.0032670307,
-0.05822795,
0.041971523,
0.022229431,
0.1281518,
-0.022270948,
-0.011725874,
0.06294936,
-0.032847952,
-0.09124354,
-0.031128692,
0.05274829,
0.047067728,
-0.08414196,
-0.029979317,
-0.020692566,
0.00949804,
-0.0035992558,
0.0074442336,
0.03928378,
0.09326073,
-0.0037437282,
-0.052663893,
-0.058101393,
-0.006925679,
0.0052269334,
0.08290669,
0.019312402,
0.0062818974,
-0.010331665,
0.008930684,
-0.037712026,
-0.045175705,
0.023950849,
-0.006926045,
0.013429504,
0.100098,
-0.0715888,
-0.021700105,
0.031693522,
-0.05161389,
-0.08224763,
-0.06577986,
-0.009853981,
0.005808086,
0.07364217,
-0.034008067,
0.024907362,
0.014441484,
0.02645124,
0.009659713,
0.030284341,
0.052878983,
-0.07536944,
0.009890014,
0.029907802,
0.017498897,
0.02313779,
0.0018918256,
0.0013156217,
-0.047173936,
-0.011251131,
-0.11422648,
-0.019960148,
0.040278148,
0.0022633963,
-0.07986738,
-0.025357265,
0.094500035,
-0.029062947,
-0.14495483,
0.2309815,
0.027703581,
0.03208736,
0.031073036,
0.042917974,
0.064246915,
0.032118786,
-0.004844535,
0.055775862,
-0.03756279,
-0.021487191,
-0.028432492,
-0.028887685,
0.03842892,
-0.017359573,
0.052465834,
-0.07493626,
-0.031175744,
0.021936033,
-0.039823197,
-0.008681939,
0.026978256,
-0.048551314,
0.011414809,
0.029628372,
-0.020587107,
0.013077965,
0.028824588,
-3.1978743e-33,
0.06475607,
-0.018065408,
0.05190019,
0.12193858,
0.028755108,
0.008794777,
-0.07044016,
-0.016856866,
0.040675826,
0.04222898,
0.025450956,
0.035772353,
-0.049134083,
0.0021395232,
-0.015527445,
0.05065655,
-0.04814189,
0.03586998,
-0.004134139,
0.10165314,
-0.055980552,
-0.010677752,
0.011231545,
0.09068785,
0.004311188,
0.035094332,
-0.009658399,
-0.09383056,
0.092755266,
0.00799794,
-0.0077075018,
-0.052119244,
-0.01259255,
0.0032277475,
0.005989667,
0.0075889886,
0.010571857,
-0.08629758,
-0.06985891,
-0.002511263,
-0.091053724,
0.0468712,
0.05203361,
0.0072902967,
0.010906411,
-0.0052922186,
0.013883815,
0.021929385,
0.0341257,
0.060227357,
0.00018942523,
0.0146624865,
-0.07000342,
0.028425341,
-0.027542787,
0.01082086,
0.03491755,
-0.022430921,
0.0096813915,
0.07725412,
0.021618832,
0.114911504,
-0.06805403,
0.023872944,
-0.015999107,
-0.017794114,
0.06442477,
0.03206309,
0.050293576,
-0.005988605,
-0.03376946,
0.017821673,
0.016567992,
0.063335925,
0.034753703,
0.046586752,
0.09789875,
-0.006560692,
0.025039855,
-0.07780643,
0.016878096,
-0.0010056288,
0.02257608,
-0.0382721,
0.09572481,
-0.005296001,
0.010567662,
-0.11538674,
-0.013233586,
-0.010786205,
-0.083147496,
0.073254965,
0.049377624,
-0.009025328,
-0.0957893,
3.3687185e-33,
0.12494067,
0.019226579,
-0.058172084,
-0.035952393,
-0.050862074,
-0.045700952,
-0.0826631,
0.14819908,
-0.088347495,
0.060315337,
0.05109269,
0.010308115,
0.1411753,
0.030833788,
0.06101746,
-0.052806143,
0.13661332,
0.00917483,
-0.017295862,
-0.0128495265,
-0.007851698,
-0.051084496,
-0.05235087,
0.0076632234,
-0.015217299,
0.017015414,
0.021324545,
0.020506723,
-0.12004153,
0.014523494,
0.026743378,
0.025221687,
-0.04270567,
0.00676352,
-0.014453511,
0.045142446,
-0.091383636,
-0.019459482,
-0.017806036,
-0.055010412,
-0.05270923,
-0.010370778,
-0.052053526,
0.020918628,
-0.080037735,
-0.012147244,
-0.057777684,
0.023249507,
-0.007838778,
-0.025807643,
-0.07987164,
-0.020683115,
0.04888083,
-0.020459235,
-0.049192864,
0.01407799,
-0.063744746,
-0.0077936463,
0.016429903,
-0.025707569,
0.013326097,
0.026210392,
0.009855086,
0.06317218,
0.0026150644,
-0.0065879063,
0.0166049,
0.032400407,
0.038005095,
-0.036269873,
-0.0069020875,
0.00019545198,
-0.0017537851,
-0.027427403,
-0.02801922,
0.049696837,
-0.028842367,
-0.0023814398,
0.01481421,
0.00976869,
0.0057697925,
0.01341087,
0.00551593,
0.037237898,
0.007291808,
0.040068958,
0.08141818,
0.07197348,
-0.013163506,
-0.042782705,
-0.010938265,
0.0049547236,
-0.00923014,
0.035068717,
-0.051007,
-1.5708556e-08,
-0.088558294,
0.02391312,
-0.016132735,
0.03169382,
0.027184812,
0.052484553,
-0.047118798,
-0.058789898,
-0.063239954,
0.040775288,
0.049807984,
0.106462926,
-0.07448737,
-0.012401869,
0.018361589,
0.039486438,
-0.024830224,
0.014500051,
-0.03712332,
0.020043189,
8.399218e-05,
0.009852795,
0.024823224,
-0.05252818,
0.02932855,
-0.0871494,
-0.01447227,
0.025996566,
-0.018731978,
-0.07618361,
0.03505914,
0.10363578,
-0.0280213,
0.012769872,
-0.076482065,
-0.018743375,
0.024961015,
0.08152011,
0.06866303,
-0.06411612,
-0.08387694,
0.061479986,
-0.03345598,
-0.10615398,
-0.040166635,
0.032536518,
0.076652974,
-0.07297006,
0.00039833272,
-0.0409393,
-0.07580284,
0.027465926,
0.07468789,
0.017779494,
0.09106629,
0.11033428,
0.00065298256,
0.051472265,
-0.01461242,
0.033237122,
0.023671487,
-0.022980422,
0.038988944,
0.030206418
],
"index": 0,
"object": "embedding"
}
],
"model": "all-minilm:l6-v2",
"object": "list",
"usage": {
"prompt_tokens": 2,
"total_tokens": 2
}
}
},
"is_streaming": false
},
"id_normalization_mapping": {}
}

View file

@@ -323,3 +323,92 @@ class TestBatchesIntegration:
if final_batch.error_file_id is not None:
deleted_error_file = openai_client.files.delete(final_batch.error_file_id)
assert deleted_error_file.deleted
def test_batch_e2e_embeddings(self, openai_client, batch_helper, embedding_model_id):
"""Run an end-to-end batch with embeddings requests including both string and list inputs."""
batch_requests = [
{
"custom_id": "success-1",
"method": "POST",
"url": "/v1/embeddings",
"body": {"model": embedding_model_id, "input": "Hello world", "encoding_format": "float"},
},
{
"custom_id": "success-2",
"method": "POST",
"url": "/v1/embeddings",
"body": {
"model": embedding_model_id,
"input": ["How are you?", "Good morning", "Have a great day"],
"encoding_format": "float",
},
},
]
with batch_helper.create_file(batch_requests) as uploaded_file:
batch = openai_client.batches.create(
input_file_id=uploaded_file.id,
endpoint="/v1/embeddings",
completion_window="24h",
metadata={"test": "e2e_embeddings_success"},
)
final_batch = batch_helper.wait_for(
batch.id,
max_wait_time=3 * 60,
expected_statuses={"completed"},
timeout_action="skip",
)
assert final_batch.status == "completed"
assert final_batch.request_counts is not None
assert final_batch.request_counts.total == 2
assert final_batch.request_counts.completed == 2
assert final_batch.output_file_id is not None
output_content = openai_client.files.content(final_batch.output_file_id)
if isinstance(output_content, str):
output_text = output_content
else:
output_text = output_content.content.decode("utf-8")
output_lines = output_text.strip().split("\n")
assert len(output_lines) == 2
# Check first result (string input)
result1 = json.loads(output_lines[0])
assert result1["custom_id"] in ["success-1", "success-2"]
assert "response" in result1
assert result1["response"]["status_code"] == 200
# Verify the response body contains embeddings data
response_body1 = json.loads(result1["response"]["body"])
assert response_body1["object"] == "list"
assert "data" in response_body1
assert len(response_body1["data"]) == 1
assert "embedding" in response_body1["data"][0]
assert "index" in response_body1["data"][0]
assert response_body1["data"][0]["index"] == 0
# Check second result (list input)
result2 = json.loads(output_lines[1])
assert result2["custom_id"] in ["success-1", "success-2"]
assert "response" in result2
assert result2["response"]["status_code"] == 200
# Verify the response body contains embeddings data for list input
response_body2 = json.loads(result2["response"]["body"])
assert response_body2["object"] == "list"
assert "data" in response_body2
assert len(response_body2["data"]) == 3 # Three strings in the list
for i, embedding_data in enumerate(response_body2["data"]):
assert "embedding" in embedding_data
assert "index" in embedding_data
assert embedding_data["index"] == i
deleted_output_file = openai_client.files.delete(final_batch.output_file_id)
assert deleted_output_file.deleted
if final_batch.error_file_id is not None:
deleted_error_file = openai_client.files.delete(final_batch.error_file_id)
assert deleted_error_file.deleted

View file

@@ -12,28 +12,10 @@
"body": {
"__type__": "ollama._types.ProcessResponse",
"__data__": {
"models": [
{
"model": "llama3.2-vision:11b",
"name": "llama3.2-vision:11b",
"digest": "6f2f9757ae97e8a3f8ea33d6adb2b11d93d9a35bef277cd2c0b1b5af8e8d0b1e",
"expires_at": "2025-10-08T12:40:47.430429-07:00",
"size": 11765236384,
"size_vram": 11765236384,
"details": {
"parent_model": "",
"format": "gguf",
"family": "mllama",
"families": [
"mllama"
],
"parameter_size": "10.7B",
"quantization_level": "Q4_K_M"
}
}
]
"models": []
}
},
"is_streaming": false
}
},
"id_normalization_mapping": {}
}

View file

@@ -93,5 +93,6 @@
}
],
"is_streaming": false
}
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,273 @@
{
"test_id": "tests/integration/responses/test_basic_responses.py::test_response_streaming_basic[client_with_models-txt=openai/gpt-4o-earth]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Which planet do humans live on?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "hA9E1EFNDdppk3"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": "Hum",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "waalMD9oTwIyd"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": "ans",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "vyTLO6JSApar2"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": " live",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "KQU8fOkg5R6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": " on",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "j7D1acmtXVxh1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": " Earth",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "F4dwv7pOew"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "4xPS4mauTagzkxj"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "pjCVonxRXx"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-56a01bed56ba",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": {
"completion_tokens": 6,
"prompt_tokens": 14,
"total_tokens": 20,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": ""
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}

View file

@@ -0,0 +1,948 @@
{
"test_id": "tests/integration/responses/test_basic_responses.py::test_response_streaming_basic[client_with_models-txt=openai/gpt-4o-saturn]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Which planet has rings around it with a name starting with letter S?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "UKZMfDO4alqdUr"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PauvZTS9JhHJQ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " planet",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "I1PMKwlor"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " with",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "NKus7N8SshY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " rings",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "hjPcORfLsk"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " around",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ceeQx9iTw"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " it",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "8ad9CC6rmF939"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " and",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "9zCNPfknf9Wk"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " a",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "d0Aa4FFslxFL5t"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " name",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "xWj2hnWGsbL"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " starting",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1pyuTcJ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " with",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "l7uhYGpUcE1"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " the",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "aj5jkF7RI6jc"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " letter",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "1Hqbz5J84"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " \"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "IpvA6bGSmegM9"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": "S",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "EzbeTEiigNgr1WP"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": "\"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "SpfVHvAhaLPtgf"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Tz0BHfbZzwIU8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " Saturn",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "v62Yi0Uy5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "FaYqJrpzivGOEw6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " Saturn",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ni5aiHptn"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2Xbu8coUEl3nE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " well",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "nOPBJSLkxK9"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": "-known",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "65uX75JvoU"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " for",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "RgOnunwbvzQQ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " its",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "s9LwMsjnLC8q"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " prominent",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "mPdj0V"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " and",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "hvjs7fxAOSUX"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " beautiful",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "WVtGRs"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " ring",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "B0ahJwERcG6"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": " system",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PRkj9JNVV"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "cvQVcsFDMfrfnVh"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "kBSMtrCs6N"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-5a1a11d0b498",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 31,
"prompt_tokens": 21,
"total_tokens": 52,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "QRz5sPimoUuAGY0"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -0,0 +1,335 @@
{
"test_id": "tests/integration/responses/test_basic_responses.py::test_response_non_streaming_basic[client_with_models-txt=openai/gpt-4o-earth]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Which planet do humans live on?"
},
{
"role": "assistant",
"content": "Humans live on Earth."
},
{
"role": "user",
"content": "Repeat your previous response in all caps."
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "Bx83B946ygH7Vb"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": "H",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "jiQelGI7oxHigEi"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": "UM",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "ePc4EP3mEfA9JN"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": "ANS",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "CSsCO4DtCo1jC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": " LIVE",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "uBbrt1MQwxP"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": " ON",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "0UHMl6xkx8nyl"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": " EAR",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "yy5k0fpmcCxH"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": "TH",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "actry6zH88X5t8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "5FuIwYGrTdqVBbu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "kHtcUGMzub"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-792f5c2603ba",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": {
"completion_tokens": 8,
"prompt_tokens": 36,
"total_tokens": 44,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": ""
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -41,9 +41,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "zftbFaqu4Bi"
"obfuscation": "eQtIB22UP5y"
}
},
{
@ -68,9 +68,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "ItcaW7b3D9"
"obfuscation": "xVnzycGrGY"
}
},
{
@ -95,9 +95,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "n9CzCrtHvG"
"obfuscation": "iCxzQ9UFRl"
}
},
{
@ -122,9 +122,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "fWwHGFYy"
"obfuscation": "lVzetb7n"
}
},
{
@ -149,9 +149,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "kdNkbMvQY5"
"obfuscation": "Oi2xzNkyQQ"
}
},
{
@ -176,9 +176,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "DhWaK87"
"obfuscation": "SBQnBFp"
}
},
{
@ -203,9 +203,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "ocA1GIrZyPFW"
"obfuscation": "CtftBIpiBh2x"
}
},
{
@ -230,12 +230,13 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "jlCQlW0"
"obfuscation": "xN1HF2a"
}
}
],
"is_streaming": true
}
},
"id_normalization_mapping": {}
}


@ -41,9 +41,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "zqLiclPltu3"
"obfuscation": "wl1L4ubkZC2"
}
},
{
@ -68,9 +68,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "grzpYaHPM8"
"obfuscation": "MKXxbHEqGM"
}
},
{
@ -95,9 +95,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "zEX86j"
"obfuscation": "RXhSL9"
}
},
{
@ -122,9 +122,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "fvu80DBX"
"obfuscation": "RTRK7mJP"
}
},
{
@ -149,9 +149,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "iIA0sN8"
"obfuscation": "6EhZvCD"
}
},
{
@ -176,9 +176,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "fohE4v"
"obfuscation": "qiNxmx"
}
},
{
@ -203,9 +203,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "9jaVkFkWKk"
"obfuscation": "RNXYcBmW82"
}
},
{
@ -215,7 +215,7 @@
"choices": [
{
"delta": {
"content": " and",
"content": ",",
"function_call": null,
"refusal": null,
"role": null,
@ -230,9 +230,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "4XV7CDgbt"
"obfuscation": "dRGyq9aos4Ao"
}
},
{
@ -242,7 +242,7 @@
"choices": [
{
"delta": {
"content": " a",
"content": " whose",
"function_call": null,
"refusal": null,
"role": null,
@ -257,9 +257,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "8q9WqFBh4Y0"
"obfuscation": "iHNHPzN"
}
},
{
@ -284,9 +284,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "wUB8pf8C"
"obfuscation": "CWIF5di5"
}
},
{
@ -296,7 +296,7 @@
"choices": [
{
"delta": {
"content": " starting",
"content": " starts",
"function_call": null,
"refusal": null,
"role": null,
@ -311,9 +311,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "TX3B"
"obfuscation": "TQurN8"
}
},
{
@ -338,9 +338,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "KuxjVC7G"
"obfuscation": "NcKx1q81"
}
},
{
@ -365,9 +365,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "dX94lMG2M"
"obfuscation": "EQ0qBHCjl"
}
},
{
@ -392,9 +392,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "N6f05V"
"obfuscation": "cjvz4i"
}
},
{
@ -419,9 +419,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "Gf1gjnyVAI"
"obfuscation": "brfeAXs8FX"
}
},
{
@ -446,9 +446,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "TQ2IiLZtvxzh"
"obfuscation": "RQJHhiH9GkeD"
}
},
{
@ -458,7 +458,7 @@
"choices": [
{
"delta": {
"content": "\"",
"content": ",\"",
"function_call": null,
"refusal": null,
"role": null,
@ -473,9 +473,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "aHvCedShvKO"
"obfuscation": "ZF32Jc9E2G"
}
},
{
@ -500,9 +500,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "fhDv1M3NuQ"
"obfuscation": "G2odltRd0L"
}
},
{
@ -527,9 +527,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "xwJPIP"
"obfuscation": "2Osmry"
}
},
{
@ -554,9 +554,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "75QYRcPV0IuE"
"obfuscation": "7SNbKabEykCr"
}
},
{
@ -581,9 +581,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "A4Rh3S"
"obfuscation": "H350qD"
}
},
{
@ -608,9 +608,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "Bohe93xwDk"
"obfuscation": "YJTtTjmwSX"
}
},
{
@ -635,9 +635,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "6f77oxEV"
"obfuscation": "73nC8tFe"
}
},
{
@ -662,9 +662,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "RCyK7L4"
"obfuscation": "nHEeUa9"
}
},
{
@ -689,9 +689,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "s8cnTAnao"
"obfuscation": "9W2IOivfJ"
}
},
{
@ -716,9 +716,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "uI9KpovPR"
"obfuscation": "WlCJ1unfP"
}
},
{
@ -743,9 +743,63 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "pWL"
"obfuscation": "LY2"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " and",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "vw5KZwo9J"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " complex",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "YUfqU"
}
},
{
@ -770,9 +824,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "jupSPBuU"
"obfuscation": "WAHNZQCW"
}
},
{
@ -797,9 +851,333 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "ZZ5e4f"
"obfuscation": "iCsrlh"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": ",",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "OssepLC4D5xV"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " which",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "KYxhInr"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "vsMjIHuV1f"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " the",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "a4hilLjNF"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " most",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "PSmGYjCZ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " extensive",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "sdu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " and",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "ruUKcQ2AO"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " visible",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "G6PXg"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " in",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "uYj8td6Yf5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " our",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "jEDd7GvP7"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " solar",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "crXMLcu"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-cc0156eadb58",
"choices": [
{
"delta": {
"content": " system",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "8wRexB"
}
},
{
@ -824,9 +1202,9 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "GyHY0sipKZ24"
"obfuscation": "VQpIxgZRp9bP"
}
},
{
@ -851,12 +1229,13 @@
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"system_fingerprint": "fp_1827dd0c55",
"usage": null,
"obfuscation": "gMrZjfo"
"obfuscation": "1PL6rSg"
}
}
],
"is_streaming": true
}
},
"id_normalization_mapping": {}
}


@ -0,0 +1,894 @@
{
"test_id": "tests/integration/responses/test_basic_responses.py::test_response_non_streaming_basic[client_with_models-txt=openai/gpt-4o-saturn]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Which planet has rings around it with a name starting with letter S?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "z56q5oZHGZKdtw"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": "The",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "LmzSBsnBqMCfY"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " planet",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "0cyvj0Txx"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " with",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "7NDFvRwCV4H"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " rings",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "cXFnChS2le"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " around",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "KjBEmV3kP"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " it",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "HRuZPETdawxRE"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " that",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "oWgRcLBEtgJ"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " starts",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "57Wb5VAIC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " with",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "PIJlSXUHBpa"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " the",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "z1j4e36eJaU4"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " letter",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "jDq6ugLXW"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " \"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Tl7ZVn6xaBP3M"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": "S",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Jgd6CpJfjXpHnNW"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": "\"",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "OYepkOsMHU8jw5"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "6UnjEo5YWE1zb"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " Saturn",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ljnq8v7G7"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "d79gDV0DqNlOO51"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " Saturn",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "bwM1CRrwC"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " is",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "uTygnf5A75n5i"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " well",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "J2mPiUrRBrP"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " known",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ySgqipG0Qe"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " for",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "KiyTURpCBdJM"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " its",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "JBRXJQhWdglI"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " prominent",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "0mjYkG"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " and",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "hR3bcUfNkRTx"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " extensive",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "18tm6k"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " ring",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "ZIL4ZROICo4"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": " system",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "EVoEpQkJL"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "cE7oSZixhWV42Kn"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "Dl1rR45BAF"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-d058ce92a083",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 29,
"prompt_tokens": 21,
"total_tokens": 50,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": "FXirZrSNkvTrASR"
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -0,0 +1,273 @@
{
"test_id": "tests/integration/responses/test_basic_responses.py::test_response_non_streaming_basic[client_with_models-txt=openai/gpt-4o-earth]",
"request": {
"method": "POST",
"url": "https://api.openai.com/v1/v1/chat/completions",
"headers": {},
"body": {
"model": "gpt-4o",
"messages": [
{
"role": "user",
"content": "Which planet do humans live on?"
}
],
"stream": true,
"stream_options": {
"include_usage": true
}
},
"endpoint": "/v1/chat/completions",
"model": "gpt-4o"
},
"response": {
"body": [
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": "",
"function_call": null,
"refusal": null,
"role": "assistant",
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "uWM4CTrTBdhbyI"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": "Hum",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "JuK3luIjXIAR0"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": "ans",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "mKYRNnis1mCZ4"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": " live",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "SrAIMLQQ8Gj"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": " on",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "2fzicdQg1s2C8"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": " Earth",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "saIKgW0kJK"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": ".",
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": null,
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "y4Bq9fWigLsde0J"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [
{
"delta": {
"content": null,
"function_call": null,
"refusal": null,
"role": null,
"tool_calls": null
},
"finish_reason": "stop",
"index": 0,
"logprobs": null
}
],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": null,
"obfuscation": "5VaLcoYLcc"
}
},
{
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
"__data__": {
"id": "rec-dca42c8244f7",
"choices": [],
"created": 0,
"model": "gpt-4o-2024-08-06",
"object": "chat.completion.chunk",
"service_tier": "default",
"system_fingerprint": "fp_cbf1785567",
"usage": {
"completion_tokens": 6,
"prompt_tokens": 14,
"total_tokens": 20,
"completion_tokens_details": {
"accepted_prediction_tokens": 0,
"audio_tokens": 0,
"reasoning_tokens": 0,
"rejected_prediction_tokens": 0
},
"prompt_tokens_details": {
"audio_tokens": 0,
"cached_tokens": 0
}
},
"obfuscation": ""
}
}
],
"is_streaming": true
},
"id_normalization_mapping": {}
}


@ -43,7 +43,7 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "zz0dWhiq9Lc"
"obfuscation": "zV8s7dKVA0e"
}
},
{
@ -70,7 +70,7 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "apZyVste9l"
"obfuscation": "DoxBbL71uu"
}
},
{
@ -97,7 +97,7 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "bKzudmyiZd"
"obfuscation": "3kPw1tJtBy"
}
},
{
@ -124,7 +124,7 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "OgfYmGnf"
"obfuscation": "GYMVbcCI"
}
},
{
@@ -151,7 +151,7 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "nWM9P08zaL"
"obfuscation": "0alfiwg16c"
}
},
{
@@ -178,7 +178,7 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "gno9CMj"
"obfuscation": "Wug4Phl"
}
},
{
@@ -205,7 +205,7 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "iheIwJKTdAVV"
"obfuscation": "cIZ0eTIUOj4x"
}
},
{
@@ -232,10 +232,11 @@
"service_tier": "default",
"system_fingerprint": "fp_f33640a400",
"usage": null,
"obfuscation": "X6N8Z47"
"obfuscation": "FbdywkT"
}
}
],
"is_streaming": true
}
},
"id_normalization_mapping": {}
}

View file

@@ -23,6 +23,14 @@ def test_response_non_streaming_basic(compat_client, text_model_id, case):
assert len(output_text) > 0
assert case.expected.lower() in output_text
# Verify usage is reported
assert response.usage is not None, "Response should include usage information"
assert response.usage.input_tokens > 0, "Input tokens should be greater than 0"
assert response.usage.output_tokens > 0, "Output tokens should be greater than 0"
assert response.usage.total_tokens == response.usage.input_tokens + response.usage.output_tokens, (
"Total tokens should equal input + output tokens"
)
retrieved_response = compat_client.responses.retrieve(response_id=response.id)
assert retrieved_response.output_text == response.output_text
@@ -73,6 +81,15 @@ def test_response_streaming_basic(compat_client, text_model_id, case):
assert len(output_text) > 0, "Response should have content"
assert case.expected.lower() in output_text, f"Expected '{case.expected}' in response"
# Verify usage is reported in final response
assert chunk.response.usage is not None, "Completed response should include usage information"
assert chunk.response.usage.input_tokens > 0, "Input tokens should be greater than 0"
assert chunk.response.usage.output_tokens > 0, "Output tokens should be greater than 0"
assert (
chunk.response.usage.total_tokens
== chunk.response.usage.input_tokens + chunk.response.usage.output_tokens
), "Total tokens should equal input + output tokens"
# Use validator for common checks
validator = StreamingValidator(events)
validator.assert_basic_event_sequence()

View file

@@ -175,6 +175,9 @@ async def test_create_openai_response_with_string_input(openai_responses_impl, m
tools=None,
stream=True,
temperature=0.1,
stream_options={
"include_usage": True,
},
)
# Should have content part events for text streaming
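For reference, stream_options={"include_usage": True} is what produces the trailing usage-only chunk seen in the recordings earlier in this diff. A minimal sketch of an equivalent direct call with the openai client (the model name, prompt, and client configuration are placeholders, not taken from this change):

from openai import OpenAI

client = OpenAI()  # assumes OPENAI_API_KEY is set; base_url could point at any compatible server
stream = client.chat.completions.create(
    model="gpt-4o-2024-08-06",  # placeholder model
    messages=[{"role": "user", "content": "Where do humans live?"}],
    stream=True,
    stream_options={"include_usage": True},
)
usage = None
for chunk in stream:
    # Only the final chunk carries usage; its choices list is empty.
    if chunk.usage is not None:
        usage = chunk.usage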

View file

@@ -213,7 +213,6 @@ class TestReferenceBatchesImpl:
@pytest.mark.parametrize(
"endpoint",
[
"/v1/embeddings",
"/v1/invalid/endpoint",
"",
],
@@ -765,3 +764,12 @@ class TestReferenceBatchesImpl:
await asyncio.sleep(0.042) # let tasks start
assert active_batches == 2, f"Expected 2 active batches, got {active_batches}"
async def test_create_batch_embeddings_endpoint(self, provider):
"""Test that batch creation succeeds with embeddings endpoint."""
batch = await provider.create_batch(
input_file_id="file_123",
endpoint="/v1/embeddings",
completion_window="24h",
)
assert batch.endpoint == "/v1/embeddings"
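The new test only checks that the endpoint value is accepted; the referenced input file would normally be JSONL in the OpenAI batch format. A hypothetical line for an embeddings batch, built in Python for illustration (custom_id, model, and input are invented, not part of this change, and assume the reference provider mirrors the OpenAI format):

import json

embeddings_batch_line = json.dumps(
    {
        "custom_id": "req-1",
        "method": "POST",
        "url": "/v1/embeddings",
        "body": {"model": "text-embedding-3-small", "input": "hello world"},
    }
)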

View file

@@ -122,7 +122,7 @@ def mock_impls():
@pytest.fixture
def scope_middleware_with_mocks(mock_auth_endpoint):
def middleware_with_mocks(mock_auth_endpoint):
"""Create AuthenticationMiddleware with mocked route implementations"""
mock_app = AsyncMock()
auth_config = AuthenticationConfig(
@@ -137,18 +137,20 @@ def scope_middleware_with_mocks(mock_auth_endpoint):
# Mock the route_impls to simulate finding routes with required scopes
from llama_stack.schema_utils import WebMethod
scoped_webmethod = WebMethod(route="/test/scoped", method="POST", required_scope="test.read")
public_webmethod = WebMethod(route="/test/public", method="GET")
routes = {
("POST", "/test/scoped"): WebMethod(route="/test/scoped", method="POST", required_scope="test.read"),
("GET", "/test/public"): WebMethod(route="/test/public", method="GET"),
("GET", "/health"): WebMethod(route="/health", method="GET", require_authentication=False),
("GET", "/version"): WebMethod(route="/version", method="GET", require_authentication=False),
("GET", "/models/list"): WebMethod(route="/models/list", method="GET", require_authentication=True),
}
# Mock the route finding logic
def mock_find_matching_route(method, path, route_impls):
if method == "POST" and path == "/test/scoped":
return None, {}, "/test/scoped", scoped_webmethod
elif method == "GET" and path == "/test/public":
return None, {}, "/test/public", public_webmethod
else:
raise ValueError("No matching route")
webmethod = routes.get((method, path))
if webmethod:
return None, {}, path, webmethod
raise ValueError("No matching route")
import llama_stack.core.server.auth
@@ -659,9 +661,9 @@ def test_valid_introspection_with_custom_mapping_authentication(
# Scope-based authorization tests
@patch("httpx.AsyncClient.post", new=mock_post_success_with_scope)
async def test_scope_authorization_success(scope_middleware_with_mocks, valid_api_key):
async def test_scope_authorization_success(middleware_with_mocks, valid_api_key):
"""Test that user with required scope can access protected endpoint"""
middleware, mock_app = scope_middleware_with_mocks
middleware, mock_app = middleware_with_mocks
mock_receive = AsyncMock()
mock_send = AsyncMock()
@@ -680,9 +682,9 @@ async def test_scope_authorization_success(scope_middleware_with_mocks, valid_ap
@patch("httpx.AsyncClient.post", new=mock_post_success_no_scope)
async def test_scope_authorization_denied(scope_middleware_with_mocks, valid_api_key):
async def test_scope_authorization_denied(middleware_with_mocks, valid_api_key):
"""Test that user without required scope gets 403 access denied"""
middleware, mock_app = scope_middleware_with_mocks
middleware, mock_app = middleware_with_mocks
mock_receive = AsyncMock()
mock_send = AsyncMock()
@@ -710,9 +712,9 @@ async def test_scope_authorization_denied(scope_middleware_with_mocks, valid_api
@patch("httpx.AsyncClient.post", new=mock_post_success_no_scope)
async def test_public_endpoint_no_scope_required(scope_middleware_with_mocks, valid_api_key):
async def test_public_endpoint_no_scope_required(middleware_with_mocks, valid_api_key):
"""Test that public endpoints work without specific scopes"""
middleware, mock_app = scope_middleware_with_mocks
middleware, mock_app = middleware_with_mocks
mock_receive = AsyncMock()
mock_send = AsyncMock()
@@ -730,9 +732,9 @@ async def test_public_endpoint_no_scope_required(scope_middleware_with_mocks, va
mock_send.assert_not_called()
async def test_scope_authorization_no_auth_disabled(scope_middleware_with_mocks):
async def test_scope_authorization_no_auth_disabled(middleware_with_mocks):
"""Test that when auth is disabled (no user), scope checks are bypassed"""
middleware, mock_app = scope_middleware_with_mocks
middleware, mock_app = middleware_with_mocks
mock_receive = AsyncMock()
mock_send = AsyncMock()
@@ -907,3 +909,41 @@ def test_kubernetes_auth_request_payload(kubernetes_auth_client, valid_token, mo
request_body = call_args[1]["json"]
assert request_body["apiVersion"] == "authentication.k8s.io/v1"
assert request_body["kind"] == "SelfSubjectReview"
async def test_unauthenticated_endpoint_access_health(middleware_with_mocks):
"""Test that /health endpoints can be accessed without authentication"""
middleware, mock_app = middleware_with_mocks
# Test request to /health without auth header (level prefix v1 is added by router)
scope = {"type": "http", "path": "/health", "headers": [], "method": "GET"}
receive = AsyncMock()
send = AsyncMock()
# Should allow the request to proceed without authentication
await middleware(scope, receive, send)
# Verify that the request was passed to the app
mock_app.assert_called_once_with(scope, receive, send)
# Verify that no error response was sent
assert not any(call[0][0].get("status") == 401 for call in send.call_args_list)
async def test_unauthenticated_endpoint_denied_for_other_paths(middleware_with_mocks):
"""Test that endpoints other than /health and /version require authentication"""
middleware, mock_app = middleware_with_mocks
# Test request to /models/list without auth header
scope = {"type": "http", "path": "/models/list", "headers": [], "method": "GET"}
receive = AsyncMock()
send = AsyncMock()
# Should return 401 error
await middleware(scope, receive, send)
# Verify that the app was NOT called
mock_app.assert_not_called()
# Verify that a 401 error response was sent
assert any(call[0][0].get("status") == 401 for call in send.call_args_list)
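These tests rely on the middleware consulting the matched WebMethod's require_authentication flag before doing any token validation. A minimal helper sketching that decision (not the actual middleware code; it only mirrors what the fixture's route table implies):

from llama_stack.schema_utils import WebMethod

def requires_auth(webmethod: WebMethod | None) -> bool:
    # Sketch: default to requiring auth; only an explicit require_authentication=False
    # (as on /health and /version in the fixture above) lets a request through
    # unauthenticated. Unmatched routes still require a token.
    return webmethod is None or getattr(webmethod, "require_authentication", True)

With the fixture's route table, requires_auth would return False for ("GET", "/health") and True for ("GET", "/models/list"), matching the two tests above.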