# Problem

The current inline provider appends the user-provided instructions to the messages as a system prompt, but the returned response object does not contain the `instructions` field (as specified in the OpenAI Responses spec).

# What does this PR do?

This pull request adds the `instructions` field to the response object definition and updates the inline provider accordingly. It also ensures that instructions from a previous response are not carried over to the next response (as specified in the OpenAI spec).

Closes #[3566](https://github.com/llamastack/llama-stack/issues/3566)

## Test Plan

- Tested manually for the change in model response w.r.t. the supplied `instructions` field (see the sketch below).
- Added a unit test to check that instructions from a previous response are not carried over to the next response.
- Added integration tests to check the `instructions` parameter in the returned response object.
- Added new recordings for the integration tests.

---------

Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
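The behavior described above can be exercised end-to-end against a running stack. The sketch below is a minimal illustration, not taken from this PR: the base URL, port, and model identifier are assumptions, and any OpenAI-compatible Llama Stack endpoint should work. It creates a response with `instructions`, checks that the field is echoed back on the response object, then chains a follow-up response and checks that the previous instructions are not inherited.

```python
# Minimal sketch of the behavior under test. The base URL/port and model id are
# assumptions for illustration only.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")
MODEL = "ollama/llama3.2:3b-instruct-fp16"

# 1) Instructions supplied at creation time should be echoed back on the
#    returned response object (the new `instructions` field).
first = client.responses.create(
    model=MODEL,
    instructions="You are a helpful assistant and speak in pirate language.",
    input="What is the capital of France?",
)
assert first.instructions == "You are a helpful assistant and speak in pirate language."

# 2) A chained response that omits `instructions` must not inherit them from
#    the previous response, per the OpenAI Responses spec.
second = client.responses.create(
    model=MODEL,
    input="And what is the capital of Germany?",
    previous_response_id=first.id,
)
assert second.instructions is None
```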
{
  "test_id": "tests/integration/agents/test_openai_responses.py::test_response_with_instructions[txt=ollama/llama3.2:3b-instruct-fp16]",
  "request": {
    "method": "POST",
    "url": "http://0.0.0.0:11434/v1/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "llama3.2:3b-instruct-fp16",
      "messages": [
        {
          "role": "system",
          "content": "You are a helpful assistant and speak in pirate language."
        },
        {
          "role": "user",
          "content": "What is the capital of France?"
        },
        {
          "role": "assistant",
          "content": "The capital of France is Paris."
        }
      ],
      "stream": true,
      "stream_options": {
        "include_usage": true
      }
    },
    "endpoint": "/v1/chat/completions",
    "model": "llama3.2:3b-instruct-fp16"
  },
  "response": {
    "body": [
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " Yer",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " look",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "in",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "'",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " fer",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " a",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " port",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " o",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "'",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " call",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": ",",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " eh",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "?",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " That",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " be",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " the",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " one",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "!",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " Yer",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " won",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "'t",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " go",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " astr",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "ay",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " with",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " that",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " answer",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": ",",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": " mate",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "y",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": ".",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [
            {
              "delta": {
                "content": "",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": "stop",
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "rec-0f5443c07d15",
          "choices": [],
          "created": 0,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": {
            "completion_tokens": 32,
            "prompt_tokens": 50,
            "total_tokens": 82,
            "completion_tokens_details": null,
            "prompt_tokens_details": null
          }
        }
      }
    ],
    "is_streaming": true
  },
  "id_normalization_mapping": {}
}