Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-06-28 02:53:30 +00:00
fix: Gaps in doc codegen (#1035)
# What does this PR do?

Catches the docs up to source by running:

```
python llama_stack/scripts/distro_codegen.py
```

## Test Plan

Manually checked the rendered output with:

```
sphinx-autobuild docs/source build/html
```
parent 371f11a569
commit ab9516c789
27 changed files with 146 additions and 100 deletions
@@ -66,6 +66,40 @@
     "sentence-transformers --no-deps",
     "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
   ],
+  "dell": [
+    "aiohttp",
+    "aiosqlite",
+    "autoevals",
+    "blobfile",
+    "chardet",
+    "chromadb-client",
+    "datasets",
+    "faiss-cpu",
+    "fastapi",
+    "fire",
+    "httpx",
+    "huggingface_hub",
+    "matplotlib",
+    "nltk",
+    "numpy",
+    "openai",
+    "opentelemetry-exporter-otlp-proto-http",
+    "opentelemetry-sdk",
+    "pandas",
+    "pillow",
+    "psycopg2-binary",
+    "pypdf",
+    "redis",
+    "requests",
+    "scikit-learn",
+    "scipy",
+    "sentencepiece",
+    "tqdm",
+    "transformers",
+    "uvicorn",
+    "sentence-transformers --no-deps",
+    "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
+  ],
   "fireworks": [
     "aiosqlite",
     "autoevals",
@@ -252,6 +286,38 @@
     "sentence-transformers --no-deps",
     "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
   ],
+  "nvidia": [
+    "aiosqlite",
+    "autoevals",
+    "blobfile",
+    "chardet",
+    "datasets",
+    "faiss-cpu",
+    "fastapi",
+    "fire",
+    "httpx",
+    "matplotlib",
+    "mcp",
+    "nltk",
+    "numpy",
+    "openai",
+    "opentelemetry-exporter-otlp-proto-http",
+    "opentelemetry-sdk",
+    "pandas",
+    "pillow",
+    "psycopg2-binary",
+    "pypdf",
+    "redis",
+    "requests",
+    "scikit-learn",
+    "scipy",
+    "sentencepiece",
+    "tqdm",
+    "transformers",
+    "uvicorn",
+    "sentence-transformers --no-deps",
+    "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
+  ],
   "ollama": [
     "aiohttp",
     "aiosqlite",
@@ -319,6 +385,36 @@
     "sentence-transformers --no-deps",
     "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
   ],
+  "sambanova": [
+    "aiosqlite",
+    "blobfile",
+    "chardet",
+    "chromadb-client",
+    "faiss-cpu",
+    "fastapi",
+    "fire",
+    "httpx",
+    "matplotlib",
+    "nltk",
+    "numpy",
+    "openai",
+    "opentelemetry-exporter-otlp-proto-http",
+    "opentelemetry-sdk",
+    "pandas",
+    "pillow",
+    "psycopg2-binary",
+    "pypdf",
+    "redis",
+    "requests",
+    "scikit-learn",
+    "scipy",
+    "sentencepiece",
+    "tqdm",
+    "transformers",
+    "uvicorn",
+    "sentence-transformers --no-deps",
+    "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
+  ],
   "tgi": [
     "aiohttp",
     "aiosqlite",
@@ -421,101 +517,5 @@
     "vllm",
     "sentence-transformers --no-deps",
     "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
-  ],
-  "nvidia": [
-    "aiosqlite",
-    "autoevals",
-    "blobfile",
-    "chardet",
-    "datasets",
-    "faiss-cpu",
-    "fastapi",
-    "fire",
-    "httpx",
-    "matplotlib",
-    "mcp",
-    "nltk",
-    "numpy",
-    "openai",
-    "opentelemetry-exporter-otlp-proto-http",
-    "opentelemetry-sdk",
-    "pandas",
-    "pillow",
-    "psycopg2-binary",
-    "pypdf",
-    "redis",
-    "requests",
-    "scikit-learn",
-    "scipy",
-    "sentencepiece",
-    "tqdm",
-    "transformers",
-    "uvicorn",
-    "sentence-transformers --no-deps",
-    "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
-  ],
-  "sambanova": [
-    "aiosqlite",
-    "blobfile",
-    "chardet",
-    "chromadb-client",
-    "faiss-cpu",
-    "fastapi",
-    "fire",
-    "httpx",
-    "matplotlib",
-    "nltk",
-    "numpy",
-    "openai",
-    "opentelemetry-exporter-otlp-proto-http",
-    "opentelemetry-sdk",
-    "pandas",
-    "pillow",
-    "psycopg2-binary",
-    "pypdf",
-    "redis",
-    "requests",
-    "scikit-learn",
-    "scipy",
-    "sentencepiece",
-    "tqdm",
-    "transformers",
-    "uvicorn",
-    "sentence-transformers --no-deps",
-    "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
-  ],
-  "dell": [
-    "aiohttp",
-    "aiosqlite",
-    "autoevals",
-    "blobfile",
-    "chardet",
-    "chromadb-client",
-    "datasets",
-    "faiss-cpu",
-    "fastapi",
-    "fire",
-    "httpx",
-    "huggingface_hub",
-    "matplotlib",
-    "nltk",
-    "numpy",
-    "openai",
-    "opentelemetry-exporter-otlp-proto-http",
-    "opentelemetry-sdk",
-    "pandas",
-    "pillow",
-    "psycopg2-binary",
-    "pypdf",
-    "redis",
-    "requests",
-    "scikit-learn",
-    "scipy",
-    "sentencepiece",
-    "tqdm",
-    "transformers",
-    "uvicorn",
-    "sentence-transformers --no-deps",
-    "torch torchvision --index-url https://download.pytorch.org/whl/cpu"
   ]
 }
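For context, the file reordered above is a plain JSON mapping from distribution name to the pip requirement strings it needs. Below is a minimal sketch of consuming that mapping; the file path and the distro names inspected are assumptions for illustration, not the project's API:

```python
import json
from pathlib import Path

# Hypothetical location of the generated dependency listing; adjust to where
# the JSON diffed above actually lives in your checkout.
deps_path = Path("distributions/dependencies.json")
deps: dict[str, list[str]] = json.loads(deps_path.read_text())

for distro in ("dell", "nvidia", "sambanova"):
    packages = deps.get(distro, [])
    # Entries such as "sentence-transformers --no-deps" bundle pip flags with
    # the package name, so treat each entry as an install argument, not a bare name.
    print(f"{distro}: {len(packages)} requirement strings")
```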
@@ -26,9 +26,7 @@ The `llamastack/distribution-ollama` distribution consists of the following prov
 | vector_io | `inline::faiss`, `remote::chromadb`, `remote::pgvector` |


-You should use this distribution if you have a regular desktop machine without very powerful GPUs. Of course, if you have powerful GPUs, you can still continue using this distribution since Ollama supports GPU acceleration.
-
-### Environment Variables
+You should use this distribution if you have a regular desktop machine without very powerful GPUs. Of course, if you have powerful GPUs, you can still continue using this distribution since Ollama supports GPU acceleration.### Environment Variables

 The following environment variables can be configured:

@@ -29,7 +29,7 @@ def find_template_dirs(templates_dir: Path) -> Iterator[Path]:
     if not templates_dir.exists():
         raise FileNotFoundError(f"Templates directory not found: {templates_dir}")

-    return (d for d in templates_dir.iterdir() if d.is_dir() and d.name != "__pycache__")
+    return sorted(d for d in templates_dir.iterdir() if d.is_dir() and d.name != "__pycache__")


 def process_template(template_dir: Path, progress) -> None:
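The one functional change in the codegen script is wrapping the template-directory generator in `sorted(...)`. A minimal sketch of why that matters, assuming a local `llama_stack/templates/` checkout:

```python
from pathlib import Path

templates_dir = Path("llama_stack/templates")  # path assumed from the repo layout

# iterdir() yields entries in whatever order the filesystem returns them, so a
# bare generator makes the codegen (and the docs it writes) order-dependent
# across machines and runs.
unstable = (d for d in templates_dir.iterdir() if d.is_dir() and d.name != "__pycache__")

# sorted(...) materializes the directories in lexicographic path order, giving
# every run of distro_codegen.py the same deterministic output.
stable = sorted(d for d in templates_dir.iterdir() if d.is_dir() and d.name != "__pycache__")
```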
@@ -115,3 +115,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321
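Each regenerated run.yaml now ends with a `server` block after its `tool_groups`, as shown in this hunk and the ones that follow. A minimal sketch of reading the new field; the loader here is plain PyYAML for illustration, not the project's own config machinery:

```python
import yaml  # PyYAML assumed to be installed

# Tail of a regenerated run.yaml, as shown in the hunks above and below.
RUN_CONFIG = """
tool_groups:
- toolgroup_id: builtin::code_interpreter
  provider_id: code-interpreter
server:
  port: 8321
"""

config = yaml.safe_load(RUN_CONFIG)
port = config.get("server", {}).get("port", 8321)  # 8321 fallback assumed
print(f"stack server port: {port}")
```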
@@ -117,3 +117,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -116,3 +116,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -107,3 +107,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -172,3 +172,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -161,3 +161,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -124,3 +124,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -114,3 +114,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -124,3 +124,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -114,3 +114,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -126,3 +126,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -115,3 +115,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -117,3 +117,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -147,3 +147,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -121,3 +121,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -110,3 +110,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -126,3 +126,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -115,3 +115,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -126,3 +126,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -114,3 +114,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -113,3 +113,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -167,3 +167,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -156,3 +156,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321

@@ -117,3 +117,5 @@ tool_groups:
   provider_id: rag-runtime
 - toolgroup_id: builtin::code_interpreter
   provider_id: code-interpreter
+server:
+  port: 8321