Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-12 04:00:42 +00:00
`chunk_id` in the `Chunk` class currently executes actual logic to compute a chunk ID. This sort of logic should not live in the API spec. Instead, providers should be in charge of calling `generate_chunk_id` and passing the result to `Chunk`. This removes the incorrect dependency between the provider implementation and the API implementation.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
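A minimal sketch of the intended split, assuming a provider-side `generate_chunk_id` helper and a plain `Chunk` data model; the module layout, signatures, and fields below are illustrative, not the exact llama-stack API:

```python
# Illustrative sketch only: class fields and helper signatures are assumptions
# based on the commit message, not the exact llama-stack code.
import hashlib
import uuid
from dataclasses import dataclass, field


@dataclass
class Chunk:
    # API-spec side: a plain data model with no ID-generation logic.
    content: str
    chunk_id: str
    metadata: dict = field(default_factory=dict)


def generate_chunk_id(document_id: str, chunk_text: str) -> str:
    """Deterministic chunk ID derived from the parent document and the chunk text."""
    digest = hashlib.sha256(f"{document_id}:{chunk_text}".encode()).hexdigest()
    return str(uuid.UUID(digest[:32]))


def make_chunks(document_id: str, pieces: list[str]) -> list[Chunk]:
    # Provider side: compute the ID here and pass it into the model, so the
    # API spec never depends on provider logic.
    return [
        Chunk(
            content=piece,
            chunk_id=generate_chunk_id(document_id, piece),
            metadata={"document_id": document_id},
        )
        for piece in pieces
    ]
```

With this shape the API spec only describes data, and each provider that builds chunks decides how their IDs are derived.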
767 lines · 25 KiB · JSON · Generated
{
  "test_id": "tests/integration/responses/test_tool_responses.py::test_response_non_streaming_file_search[openai_client-txt=openai/gpt-4o:emb=sentence-transformers/nomic-ai/nomic-embed-text-v1.5:dim=768-llama_experts_pdf]",
  "request": {
    "method": "POST",
    "url": "https://api.openai.com/v1/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "gpt-4o",
      "messages": [
        {
          "role": "user",
          "content": "How many experts does the Llama 4 Maverick model have?"
        },
        {
          "role": "assistant",
          "content": "",
          "tool_calls": [
            {
              "index": 0,
              "id": "call_M8gyYiB39MwYdJKc4aHIGbfA",
              "type": "function",
              "function": {
                "name": "knowledge_search",
                "arguments": "{\"query\":\"Llama 4 Maverick model number of experts\"}"
              }
            }
          ]
        },
        {
          "role": "tool",
          "tool_call_id": "call_M8gyYiB39MwYdJKc4aHIGbfA",
          "content": [
            {
              "type": "text",
              "text": "knowledge_search tool found 2 chunks:\nBEGIN of knowledge_search tool results.\n"
            },
            {
              "type": "text",
              "text": "[1] document_id: file-379221123213, score: 0.008294223715346738, attributes: {'filename': 'llama_stack_and_models.pdf', 'chunk_id': 'c3556aea-3b73-0278-aa16-ebbdb4c18b18', 'document_id': 'file-379221123213', 'token_count': 98.0, 'metadata_token_count': 11.0} (cite as <|file-379221123213|>)\n, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n"
            },
            {
              "type": "text",
              "text": "[2] document_id: file-379221123213, score: 0.0033899213359898477, attributes: {'filename': 'llama_stack_and_models.pdf', 'chunk_id': '16d99c69-8323-27ce-3bd7-7b51dcac2735', 'document_id': 'file-379221123213', 'token_count': 498.0, 'metadata_token_count': 11.0} (cite as <|file-379221123213|>)\nLlama Stack \nLlama Stack Overview \nLlama Stack standardizes the core building blocks that simplify AI application development. It codifies best \npractices\n \nacross\n \nthe\n \nLlama\n \necosystem.\n \nMore\n \nspecifically,\n \nit\n \nprovides\n \u25cf Unified API layer for Inference, RAG, Agents, Tools, Safety, Evals, and Telemetry. \u25cf Plugin architecture to support the rich ecosystem of different API implementations in various \nenvironments,\n \nincluding\n \nlocal\n \ndevelopment,\n \non-premises,\n \ncloud,\n \nand\n \nmobile.\n \u25cf Prepackaged verified distributions which offer a one-stop solution for developers to get started quickly \nand\n \nreliably\n \nin\n \nany\n \nenvironment.\n \u25cf Multiple developer interfaces like CLI and SDKs for Python, Typescript, iOS, and Android. \u25cf Standalone applications as examples for how to build production-grade AI applications with Llama \nStack.\n \nLlama Stack Benefits \n\u25cf Flexible Options: Developers can choose their preferred infrastructure without changing APIs and enjoy \nflexible\n \ndeployment\n \nchoices.\n \u25cf Consistent Experience: With its unified APIs, Llama Stack makes it easier to build, test, and deploy AI \napplications\n \nwith\n \nconsistent\n \napplication\n \nbehavior.\n \u25cf Robust Ecosystem: Llama Stack is already integrated with distribution partners (cloud providers, \nhardware\n \nvendors,\n \nand\n \nAI-focused\n \ncompanies)\n \nthat\n \noffer\n \ntailored\n \ninfrastructure,\n \nsoftware,\n \nand\n \nservices\n \nfor\n \ndeploying\n \nLlama\n \nmodels.\n \nLlama 4 Maverick \n Llama 4 Maverick is a Mixture-of-Experts (MoE) model with 17 billion active parameters and 128 experts. \n"
            },
            {
              "type": "text",
              "text": "END of knowledge_search tool results.\n"
            },
            {
              "type": "text",
              "text": "The above results were retrieved to help answer the user's query: \"Llama 4 Maverick model number of experts\". Use them as supporting information only in answering this query. Cite sources immediately at the end of sentences before punctuation, using `<|file-id|>` format (e.g., 'This is a fact <|file-Cn3MSNn72ENTiiq11Qda4A|>.'). Do not add extra punctuation. Use only the file IDs provided (do not invent new ones).\n"
            }
          ]
        }
      ],
      "stream": true,
      "stream_options": {
        "include_usage": true
      },
      "tools": [
        {
          "type": "function",
          "function": {
            "name": "knowledge_search",
            "description": "Search for information in a database.",
            "parameters": {
              "type": "object",
              "properties": {
                "query": {
                  "type": "string",
                  "description": "The query to search for. Can be a natural language sentence or keywords."
                }
              },
              "required": [
                "query"
              ]
            }
          }
        }
      ]
    },
    "endpoint": "/v1/chat/completions",
    "model": "gpt-4o"
  },
  "response": {
    "body": [
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "", "function_call": null, "refusal": null, "role": "assistant", "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "SH6nRcfXzd8qPg"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "The", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "vbJu1mhpQKtNr"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " L", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "uAUiYAVpMW8Ph9"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "lama", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "DJxjs1HFugOD"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " ", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "sU2IncrauGmuYki"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "4", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "IkZbrWS45cqkmqi"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " Maver", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "YbZYhGgoGE"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "ick", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "7FtHnapGtkc09"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " model", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "8P3mUr7HfV"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " has", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "WxYXJUfkyxqZ"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " ", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "E9hIXNC7oeJcZ8v"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "128", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "L9ww7cI1pSSt3"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " experts", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "hHao5x7a"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": " <", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "0cwygEJttBgv7M"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "|", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "KYVCnE5AA6MnQ0Y"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "file", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "N3DcYBcrQDzD"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "-", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "CUpjI7Qo17k4aeo"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "379", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "s1694CAHwowUf"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "221", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "I94vCKkpQNsx6"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "123", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "RNfAfPtJK3KHE"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "213", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "Gk04vo9RXpl3P"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": "|", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "rkWPIUdNABAeP7V"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": ">.", "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": null, "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "GIF1vPXxInWrhl"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [{"delta": {"content": null, "function_call": null, "refusal": null, "role": null, "tool_calls": null}, "finish_reason": "stop", "index": 0, "logprobs": null}], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": null, "obfuscation": "Oa1imYdRme"}},
      {"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk", "__data__": {"id": "rec-40985d2e0ff8", "choices": [], "created": 0, "model": "gpt-4o-2024-08-06", "object": "chat.completion.chunk", "service_tier": "default", "system_fingerprint": "fp_a788c5aef0", "usage": {"completion_tokens": 23, "prompt_tokens": 1048, "total_tokens": 1071, "completion_tokens_details": {"accepted_prediction_tokens": 0, "audio_tokens": 0, "reasoning_tokens": 0, "rejected_prediction_tokens": 0}, "prompt_tokens_details": {"audio_tokens": 0, "cached_tokens": 0}}, "obfuscation": "0Xx3txQF13S"}}
    ],
    "is_streaming": true
  },
  "response_is_streaming": null,
  "id_normalization_mapping": {}
}
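The recording above is a single JSON object: `test_id` names the integration test, `request` is the captured chat-completions call, and `response.body` holds the streamed chunks in order. A minimal sketch, assuming the recording is saved to a local file (the file name is illustrative), of how the streamed deltas can be reassembled when replaying it:

```python
# Illustrative replay sketch: the field names ("response", "body", "__data__",
# "choices", "delta") match the recording above; the file path is an assumption.
import json


def replay_recorded_text(path: str) -> str:
    """Join the streamed delta contents from a recorded chat-completions response."""
    with open(path) as f:
        recording = json.load(f)

    pieces = []
    for chunk in recording["response"]["body"]:
        for choice in chunk["__data__"]["choices"]:
            content = choice["delta"]["content"]
            if content:
                pieces.append(content)
    return "".join(pieces)


# For the recording above this prints:
# The Llama 4 Maverick model has 128 experts <|file-379221123213|>.
print(replay_recorded_text("rec-40985d2e0ff8.json"))
```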