Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-17 07:07:19 +00:00)
This PR updates the Conversation item related types and improves a couple of critical parts of the implementation:

- it creates a streaming output item for the final assistant message output by the model. Until now we only added content parts and included that message in the final response.
- it rewrites the conversation update code completely to account for items other than messages (tool calls, outputs, etc.)

## Test Plan

Used the test script from https://github.com/llamastack/llama-stack-client-python/pull/281 for this:

```
TEST_API_BASE_URL=http://localhost:8321/v1 \
pytest tests/integration/test_agent_turn_step_events.py::test_client_side_function_tool -xvs
```
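For context, here is a minimal, hypothetical sketch of what the first change means for a streaming consumer: the final assistant message is now also surfaced as a completed output item in the stream, rather than only as content-part deltas folded into the final response. The sketch assumes an OpenAI-compatible Responses endpoint at the Llama Stack base URL from the test plan and uses the event names from the `openai` Python SDK; it is not the PR's test script, and the model id is illustrative.

```python
from openai import OpenAI

# Hypothetical local Llama Stack deployment; adjust base_url/model as needed.
client = OpenAI(base_url="http://localhost:8321/v1", api_key="none")

stream = client.responses.create(
    model="openai/gpt-4o",  # illustrative model id
    input="Which planet do humans live on?",
    stream=True,
)

for event in stream:
    if event.type == "response.output_text.delta":
        # Incremental content parts (already emitted before this PR).
        print(event.delta, end="", flush=True)
    elif event.type == "response.output_item.done":
        # With this change, the completed assistant message also arrives as its
        # own output item event instead of only appearing in the final response.
        print(f"\n[completed output item: {event.item.type}]")
    elif event.type == "response.completed":
        print("[response completed]")
```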
335 lines · 9.6 KiB · JSON · Generated
{
  "test_id": "tests/integration/responses/test_basic_responses.py::test_response_non_streaming_basic[openai_client-txt=openai/gpt-4o-earth]",
  "request": {
    "method": "POST",
    "url": "https://api.openai.com/v1/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "gpt-4o",
      "messages": [
        {
          "role": "user",
          "content": "Which planet do humans live on?"
        },
        {
          "role": "assistant",
          "content": "Humans live on Earth."
        },
        {
          "role": "user",
          "content": "Repeat your previous response in all caps."
        }
      ],
      "stream": true,
      "stream_options": {
        "include_usage": true
      }
    },
    "endpoint": "/v1/chat/completions",
    "model": "gpt-4o"
  },
  "response": {
    "body": [
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": "",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "gsvovaVxgEcSGR"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": "H",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "smX3x5kV4yv0BZT"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": "UM",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "2pezTfOmCCkYVy"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": "ANS",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "7lsjSbNCYQzTK"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": " LIVE",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "F3Ts19rWcVv"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": " ON",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "101uIiiCiEkpW"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": " EAR",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "jXyuj2mY6T3M"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": "TH",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "Iv4hhrdVc7QeHu"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": ".",
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "M8movWgekBxUGG8"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [
            {
              "delta": {
                "content": null,
                "function_call": null,
                "refusal": null,
                "role": null,
                "tool_calls": null
              },
              "finish_reason": "stop",
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": null,
          "obfuscation": "1yZ9FXjfa2"
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-ed2ba45a1d4e",
          "choices": [],
          "created": 0,
          "model": "gpt-4o-2024-08-06",
          "object": "chat.completion.chunk",
          "service_tier": "default",
          "system_fingerprint": "fp_cbf1785567",
          "usage": {
            "completion_tokens": 8,
            "prompt_tokens": 36,
            "total_tokens": 44,
            "completion_tokens_details": {
              "accepted_prediction_tokens": 0,
              "audio_tokens": 0,
              "reasoning_tokens": 0,
              "rejected_prediction_tokens": 0
            },
            "prompt_tokens_details": {
              "audio_tokens": 0,
              "cached_tokens": 0
            }
          },
          "obfuscation": ""
        }
      }
    ],
    "is_streaming": true
  },
  "id_normalization_mapping": {}
}
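The recording above captures the upstream chunks verbatim: the assistant's reply can be reconstructed by concatenating `choices[0].delta.content` across the streamed chunks (yielding "HUMANS LIVE ON EARTH."), while the final chunk carries only usage. A minimal sketch of that reassembly, assuming the recording is stored as a JSON file at a hypothetical path:

```python
import json


def recorded_assistant_text(path: str) -> str:
    """Concatenate the streamed content deltas from a recording file."""
    with open(path) as f:
        recording = json.load(f)

    text = ""
    for chunk in recording["response"]["body"]:
        # The trailing usage-only chunk has an empty "choices" list and is skipped.
        for choice in chunk["__data__"]["choices"]:
            delta = choice["delta"].get("content")
            if delta:
                text += delta
    return text


# Hypothetical path, for illustration only.
print(recorded_assistant_text("recordings/rec-ed2ba45a1d4e.json"))
```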