mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-15 14:43:48 +00:00
88 lines
2.6 KiB
JSON
88 lines
2.6 KiB
JSON
{
|
|
"test_id": "tests/integration/agents/test_agents.py::test_create_turn_response[ollama/llama3.2:3b-instruct-fp16-client_tools0]",
|
|
"request": {
|
|
"method": "POST",
|
|
"url": "http://0.0.0.0:11434/v1/v1/chat/completions",
|
|
"headers": {},
|
|
"body": {
|
|
"model": "llama3.2:3b-instruct-fp16",
|
|
"messages": [
|
|
{
|
|
"role": "system",
|
|
"content": "You are a helpful assistant"
|
|
},
|
|
{
|
|
"role": "user",
|
|
"content": "Call get_boiling_point tool and answer What is the boiling point of polyjuice?"
|
|
},
|
|
{
|
|
"role": "assistant",
|
|
"content": "",
|
|
"tool_calls": [
|
|
{
|
|
"id": "call_vrp71z1i",
|
|
"type": "function",
|
|
"function": {
|
|
"name": "get_boiling_point",
|
|
"arguments": "{}"
|
|
}
|
|
}
|
|
]
|
|
},
|
|
{
|
|
"role": "tool",
|
|
"tool_call_id": "call_vrp71z1i",
|
|
"content": "Error when running tool: get_boiling_point() missing 1 required positional argument: 'liquid_name'"
|
|
}
|
|
],
|
|
"max_tokens": 512,
|
|
"stream": true,
|
|
"temperature": 0.0001,
|
|
"tool_choice": "auto",
|
|
"tools": [
|
|
{
|
|
"type": "function",
|
|
"function": {
|
|
"name": "get_boiling_point",
|
|
"description": "Returns the boiling point of a liquid in Celcius or Fahrenheit."
|
|
}
|
|
}
|
|
],
|
|
"top_p": 0.9,
|
|
"extra_body": {}
|
|
},
|
|
"endpoint": "/v1/chat/completions",
|
|
"model": "llama3.2:3b-instruct-fp16"
|
|
},
|
|
"response": {
|
|
"body": [
|
|
{
|
|
"__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
|
|
"__data__": {
|
|
"id": "rec-d2f5f3e7ffe1",
|
|
"choices": [
|
|
{
|
|
"delta": {
|
|
"content": "I was unable to find the boiling point for polyjuice. The tool requires a liquid name as an argument, but I couldn't find any information on the boiling point of polyjuice. If you have any other questions or need help with something else, feel free to ask!",
|
|
"function_call": null,
|
|
"refusal": null,
|
|
"role": "assistant",
|
|
"tool_calls": null
|
|
},
|
|
"finish_reason": "stop",
|
|
"index": 0,
|
|
"logprobs": null
|
|
}
|
|
],
|
|
"created": 0,
|
|
"model": "llama3.2:3b-instruct-fp16",
|
|
"object": "chat.completion.chunk",
|
|
"service_tier": null,
|
|
"system_fingerprint": "fp_ollama",
|
|
"usage": null
|
|
}
|
|
}
|
|
],
|
|
"is_streaming": true
|
|
}
|
|
}
|