{
  "request": {
    "method": "POST",
    "url": "http://localhost:11434/api/generate",
    "headers": {},
    "body": {
      "model": "llama3.2:3b-instruct-fp16",
      "raw": true,
      "prompt": "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n<|eot_id|><|start_header_id|>user<|end_header_id|>\n\nTest trace 1<|eot_id|><|start_header_id|>assistant<|end_header_id|>\n\n",
      "options": {
        "temperature": 0.0
      },
      "stream": false
    },
    "endpoint": "/api/generate",
    "model": "llama3.2:3b-instruct-fp16"
  },
  "response": {
    "body": {
      "__type__": "ollama._types.GenerateResponse",
      "__data__": {
        "model": "llama3.2:3b-instruct-fp16",
        "created_at": "2025-08-01T22:51:13.528654496Z",
        "done": true,
        "done_reason": "stop",
        "total_duration": 9655114226,
        "load_duration": 43539189,
        "prompt_eval_count": 20,
        "prompt_eval_duration": 545833813,
        "eval_count": 51,
        "eval_duration": 9065243767,
        "response": "It seems like you're trying to test the system, but I'm not sure what specific functionality or feature you'd like to test. Could you please provide more context or clarify what you're looking for? I'll do my best to assist you!",
        "thinking": null,
        "context": null
      }
    },
    "is_streaming": false
  }
}