# What does this PR do?

Providers that live outside of the llama-stack codebase are now supported. A new property, `external_providers_dir`, has been added to the main config and can be set as follows:

```yaml
external_providers_dir: /etc/llama-stack/providers.d/
```

The expected directory structure is:

```
providers.d/
  inference/
    custom_ollama.yaml
    vllm.yaml
  vector_io/
    qdrant.yaml
```

where `custom_ollama.yaml` is:

```yaml
adapter:
  adapter_type: custom_ollama
  pip_packages: ["ollama", "aiohttp"]
  config_class: llama_stack_ollama_provider.config.OllamaImplConfig
  module: llama_stack_ollama_provider
api_dependencies: []
optional_api_dependencies: []
```

The provider package must, of course, be installed on the system. Here is the `llama_stack_ollama_provider` example:

```console
$ uv pip show llama-stack-ollama-provider
Using Python 3.10.16 environment at: /Users/leseb/Documents/AI/llama-stack/.venv
Name: llama-stack-ollama-provider
Version: 0.1.0
Location: /Users/leseb/Documents/AI/llama-stack/.venv/lib/python3.10/site-packages
Editable project location: /private/var/folders/mq/rnm5w_7s2d3fxmtkx02knvhm0000gn/T/tmp.ZBHU5Ezxg4/ollama/llama-stack-ollama-provider
Requires:
Required-by:
```

Closes: https://github.com/meta-llama/llama-stack/issues/658

Signed-off-by: Sébastien Han <seb@redhat.com>
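To make the spec above concrete, here is a rough sketch of the package layout it points at (`config_class` and `module`). This is not part of the PR; the field names, the adapter class, and the `get_adapter_impl()` entry-point convention are assumptions modeled on existing in-tree remote providers.

```python
# Illustrative sketch only; names and the entry-point convention are assumptions.

# llama_stack_ollama_provider/config.py
from pydantic import BaseModel


class OllamaImplConfig(BaseModel):
    # Assumed field: where the Ollama server is listening.
    url: str = "http://localhost:11434"


# llama_stack_ollama_provider/__init__.py
class OllamaAdapter:
    # Hypothetical adapter; a real one would implement the Inference API.
    def __init__(self, config: OllamaImplConfig) -> None:
        self.config = config

    async def initialize(self) -> None:
        pass


async def get_adapter_impl(config: OllamaImplConfig, deps=None):
    # llama-stack resolves `module` from the provider spec and calls an entry
    # point like this one (assumed convention for remote adapters).
    impl = OllamaAdapter(config)
    await impl.initialize()
    return impl
```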
For reference, the `pyproject.toml` of the example provider package:
```toml
[project]
dependencies = [
    "llama-stack",
    "pydantic",
    "ollama",
    "aiohttp",
    "aiosqlite",
    "autoevals",
    "blobfile",
    "chardet",
    "chromadb-client",
    "datasets",
    "faiss-cpu",
    "fastapi",
    "fire",
    "httpx",
    "matplotlib",
    "mcp",
    "nltk",
    "numpy",
    "openai",
    "opentelemetry-exporter-otlp-proto-http",
    "opentelemetry-sdk",
    "pandas",
    "pillow",
    "psycopg2-binary",
    "pymongo",
    "pypdf",
    "redis",
    "requests",
    "scikit-learn",
    "scipy",
    "sentencepiece",
    "tqdm",
    "transformers",
    "tree_sitter",
    "uvicorn",
]
name = "llama-stack-provider-ollama"
version = "0.1.0"
description = "External provider for Ollama using the Llama Stack API"
readme = "README.md"
requires-python = ">=3.10"
```
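Finally, a minimal sketch of how the `providers.d/<api>/<provider>.yaml` layout described above could be walked and parsed. This is purely illustrative and not the PR's actual loader; the function name and return shape are made up for the example.

```python
# Illustrative only: discover provider specs under external_providers_dir.
from pathlib import Path

import yaml  # pyyaml


def discover_external_providers(external_providers_dir: str) -> dict:
    specs: dict[str, dict[str, dict]] = {}
    root = Path(external_providers_dir)
    for api_dir in sorted(p for p in root.iterdir() if p.is_dir()):
        # api_dir.name is e.g. "inference" or "vector_io"
        for spec_file in sorted(api_dir.glob("*.yaml")):
            spec = yaml.safe_load(spec_file.read_text())
            # e.g. specs["inference"]["custom_ollama"] = {"adapter": {...}, ...}
            specs.setdefault(api_dir.name, {})[spec_file.stem] = spec
    return specs


if __name__ == "__main__":
    print(discover_external_providers("/etc/llama-stack/providers.d/"))
```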