Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-15 22:18:00 +00:00
Some checks failed
Integration Tests (Replay) / discover-tests (push) Successful in 3s
Test External Providers Installed via Module / test-external-providers-from-module (venv) (push) Has been skipped
Integration Auth Tests / test-matrix (oauth2_token) (push) Failing after 9s
Python Package Build Test / build (3.12) (push) Failing after 4s
Vector IO Integration Tests / test-matrix (3.12, inline::milvus) (push) Failing after 12s
Test Llama Stack Build / generate-matrix (push) Successful in 11s
Test Llama Stack Build / build-ubi9-container-distribution (push) Failing after 12s
Vector IO Integration Tests / test-matrix (3.12, inline::faiss) (push) Failing after 14s
SqlStore Integration Tests / test-postgres (3.12) (push) Failing after 22s
Test External API and Providers / test-external (venv) (push) Failing after 14s
Integration Tests (Replay) / Integration Tests (, , , client=, vision=) (push) Failing after 12s
Vector IO Integration Tests / test-matrix (3.12, remote::pgvector) (push) Failing after 15s
SqlStore Integration Tests / test-postgres (3.13) (push) Failing after 22s
Test Llama Stack Build / build-custom-container-distribution (push) Failing after 14s
Unit Tests / unit-tests (3.13) (push) Failing after 14s
Test Llama Stack Build / build-single-provider (push) Failing after 13s
Vector IO Integration Tests / test-matrix (3.12, remote::chromadb) (push) Failing after 18s
Unit Tests / unit-tests (3.12) (push) Failing after 16s
Vector IO Integration Tests / test-matrix (3.12, remote::qdrant) (push) Failing after 18s
Vector IO Integration Tests / test-matrix (3.13, remote::weaviate) (push) Failing after 10s
Vector IO Integration Tests / test-matrix (3.13, inline::faiss) (push) Failing after 11s
Vector IO Integration Tests / test-matrix (3.12, remote::weaviate) (push) Failing after 16s
Vector IO Integration Tests / test-matrix (3.13, remote::qdrant) (push) Failing after 18s
Test Llama Stack Build / build (push) Failing after 12s
Vector IO Integration Tests / test-matrix (3.13, remote::chromadb) (push) Failing after 18s
Vector IO Integration Tests / test-matrix (3.13, remote::pgvector) (push) Failing after 20s
Vector IO Integration Tests / test-matrix (3.13, inline::sqlite-vec) (push) Failing after 16s
Python Package Build Test / build (3.13) (push) Failing after 53s
Vector IO Integration Tests / test-matrix (3.13, inline::milvus) (push) Failing after 59s
Vector IO Integration Tests / test-matrix (3.12, inline::sqlite-vec) (push) Failing after 1m1s
Update ReadTheDocs / update-readthedocs (push) Failing after 1m6s
Pre-commit / pre-commit (push) Successful in 1m53s
A bunch of miscellaneous cleanup focused on tests, which ended up speeding up the starter distro substantially.
- Pulled the llama stack client init for tests into `pytest_sessionstart` so it does not clobber output (see the sketch after this list)
- Profiling that showed where the starter distro was doing lots of heavy imports, so those imports are now lazy
- The starter distro now starts 20+ seconds faster on my Mac
- A few other smallish refactors for `compat_client`
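A rough sketch of the two ideas above, assuming a `conftest.py` layout; the client class import, the base URL, the attribute name on the pytest config, and the stand-in heavy module are illustrative assumptions, not the repository's actual fixtures.

```python
# conftest.py -- minimal sketch, not the real test fixtures.
import importlib

import pytest

_heavy_module = None  # cache for the lazily imported module


def pytest_sessionstart(session: pytest.Session) -> None:
    """Build the llama stack client once, before pytest starts streaming
    test progress, so client startup logging cannot clobber that output."""
    from llama_stack_client import LlamaStackClient  # assumed client class

    # Placeholder URL; stash the client where the fixtures expect to find it.
    session.config._llama_client = LlamaStackClient(base_url="http://localhost:8321")


def heavy_dependency():
    """Lazy import: pay the import cost only on first use. Applying this
    pattern to the starter distro's heaviest imports is what cut its startup time."""
    global _heavy_module
    if _heavy_module is None:
        _heavy_module = importlib.import_module("numpy")  # stand-in for any slow import
    return _heavy_module
```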
674 lines · 19 KiB · JSON
{
  "request": {
    "method": "POST",
    "url": "http://localhost:11434/v1/chat/completions",
    "headers": {},
    "body": {
      "model": "llama3.2:3b-instruct-fp16",
      "messages": [
        {
          "role": "user",
          "content": "Hello, world!"
        }
      ],
      "stream": true
    },
    "endpoint": "/chat/completions",
    "model": "llama3.2:3b-instruct-fp16"
  },
  "response": {
    "body": [
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": "Hello",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": "!",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " It",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": "'s",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " nice",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " to",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " meet",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " you",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": ".",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " Is",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " there",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " something",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " I",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " can",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422171,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " help",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " you",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " with",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " or",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " would",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " you",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " like",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " to",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": " chat",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": "?",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": null,
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      },
      {
        "__type__": "openai.types.chat.chat_completion_chunk.ChatCompletionChunk",
        "__data__": {
          "id": "chatcmpl-698",
          "choices": [
            {
              "delta": {
                "content": "",
                "function_call": null,
                "refusal": null,
                "role": "assistant",
                "tool_calls": null
              },
              "finish_reason": "stop",
              "index": 0,
              "logprobs": null
            }
          ],
          "created": 1754422172,
          "model": "llama3.2:3b-instruct-fp16",
          "object": "chat.completion.chunk",
          "service_tier": null,
          "system_fingerprint": "fp_ollama",
          "usage": null
        }
      }
    ],
    "is_streaming": true
  }
}
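For reference, a call shaped like the recorded request can be made with the OpenAI Python client pointed at Ollama's OpenAI-compatible endpoint; this is a sketch of the request side only, and the API key value is an arbitrary placeholder (Ollama does not check it).

```python
# Reproduce the recorded request: a streamed /chat/completions call against a
# local Ollama server's OpenAI-compatible API.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:11434/v1", api_key="unused")

stream = client.chat.completions.create(
    model="llama3.2:3b-instruct-fp16",
    messages=[{"role": "user", "content": "Hello, world!"}],
    stream=True,
)

# Each item is a ChatCompletionChunk like the "__data__" entries recorded in
# "response.body"; the final chunk carries finish_reason == "stop".
for chunk in stream:
    print(chunk.choices[0].delta.content or "", end="", flush=True)
print()
```

On replay, each recorded entry can be turned back into the SDK type named by its `__type__` field. A minimal sketch of that deserialization (the repository's actual replay machinery may differ, and the file path is hypothetical):

```python
# Rebuild ChatCompletionChunk objects from a recording file like the one above.
import json

from openai.types.chat import ChatCompletionChunk

with open("recording.json") as f:  # hypothetical path to this recording
    record = json.load(f)

chunks = [
    ChatCompletionChunk.model_validate(entry["__data__"])
    for entry in record["response"]["body"]
    if entry["__type__"].endswith("ChatCompletionChunk")
]

# Concatenating the deltas recovers the recorded reply:
# "Hello! It's nice to meet you. Is there something I can help you with or would you like to chat?"
text = "".join(c.choices[0].delta.content or "" for c in chunks)
print(text)
```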