ci: tests

Signed-off-by: Sébastien Han <seb@redhat.com>
Author: Sébastien Han, 2025-06-12 16:39:08 +02:00
parent 9443cef577
commit f3297c4166
22 changed files with 236 additions and 181 deletions

.github/workflows/test-external.yml (renamed from .github/workflows/test-external-providers.yml)

@@ -1,4 +1,4 @@
-name: Test External Providers
+name: Test External API and Providers

 on:
   push:
@@ -11,10 +11,10 @@ on:
       - 'uv.lock'
       - 'pyproject.toml'
       - 'requirements.txt'
-      - '.github/workflows/test-external-providers.yml' # This workflow
+      - '.github/workflows/test-external.yml' # This workflow

 jobs:
-  test-external-providers:
+  test-external:
     runs-on: ubuntu-latest
     strategy:
       matrix:
@@ -28,24 +28,23 @@ jobs:
       - name: Install dependencies
         uses: ./.github/actions/setup-runner

-      - name: Apply image type to config file
+      - name: Create API configuration
         run: |
-          yq -i '.image_type = "${{ matrix.image-type }}"' tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml
-          cat tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml
+          mkdir -p /home/runner/.llama/apis.d
+          cp tests/external/weather.yaml /home/runner/.llama/apis.d/weather.yaml

-      - name: Setup directory for Ollama custom provider
-        run: |
-          mkdir -p tests/external-provider/llama-stack-provider-ollama/src/
-          cp -a llama_stack/providers/remote/inference/ollama/ tests/external-provider/llama-stack-provider-ollama/src/llama_stack_provider_ollama
-
       - name: Create provider configuration
         run: |
-          mkdir -p /home/runner/.llama/providers.d/remote/inference
-          cp tests/external-provider/llama-stack-provider-ollama/custom_ollama.yaml /home/runner/.llama/providers.d/remote/inference/custom_ollama.yaml
+          mkdir -p /home/runner/.llama/providers.d/remote/weather
+          cp tests/external/kaze.yaml /home/runner/.llama/providers.d/remote/weather/kaze.yaml

+      - name: Print distro dependencies
+        run: |
+          USE_COPY_NOT_MOUNT=true LLAMA_STACK_DIR=. llama stack build --config tests/external/build.yaml --print-deps-only
+
       - name: Build distro from config file
         run: |
-          USE_COPY_NOT_MOUNT=true LLAMA_STACK_DIR=. llama stack build --config tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml
+          USE_COPY_NOT_MOUNT=true LLAMA_STACK_DIR=. llama stack build --config tests/external/build.yaml

       - name: Start Llama Stack server in background
         if: ${{ matrix.image-type }} == 'venv'
@@ -55,19 +54,22 @@ jobs:
           # Use the virtual environment created by the build step (name comes from build config)
           source ci-test/bin/activate
           uv pip list
-          nohup llama stack run tests/external-provider/llama-stack-provider-ollama/run.yaml --image-type ${{ matrix.image-type }} > server.log 2>&1 &
+          nohup llama stack run tests/external/run-byoa.yaml --image-type ${{ matrix.image-type }} > server.log 2>&1 &

       - name: Wait for Llama Stack server to be ready
         run: |
+          echo "Waiting for Llama Stack server..."
           for i in {1..30}; do
-            if ! grep -q "Successfully loaded external provider remote::custom_ollama" server.log; then
-              echo "Waiting for Llama Stack server to load the provider..."
-              sleep 1
-            else
-              echo "Provider loaded"
+            if curl -sSf http://localhost:8321/v1/health | grep -q "OK"; then
+              echo "Llama Stack server is up!"
               exit 0
             fi
+            sleep 1
           done
-          echo "Provider failed to load"
+          echo "Llama Stack server failed to start"
           cat server.log
           exit 1
+
+      - name: Test external API
+        run: |
+          curl -sSf http://localhost:8321/v1/weather/locations


@@ -361,7 +361,7 @@ python -m llama_stack.distribution.server.server --yaml-config ~/.llama/run-byoa
 9. Test the API:
 ```bash
-curl -s http://127.0.0.1:8321/v1/weather/locations
+curl -sSf http://127.0.0.1:8321/v1/weather/locations
 {"locations":["Paris","Tokyo"]}%
 ```


@@ -30,14 +30,14 @@ class DynamicApiMeta(EnumMeta):
         try:
             return super().__call__(value)
         except ValueError as e:
-            # If the value doesn't exist, create a new enum member
-            # Create a new member name from the value
-            member_name = value.lower().replace("-", "_")
-
             # If this value was already dynamically added, return it
             if value in cls._dynamic_values:
                 return cls._dynamic_values[value]

+            # If the value doesn't exist, create a new enum member
+            # Create a new member name from the value
+            member_name = value.lower().replace("-", "_")
+
             # If this member name already exists in the enum, return the existing member
             if member_name in cls._member_map_:
                 return cls._member_map_[member_name]
@@ -55,7 +55,7 @@ class DynamicApiMeta(EnumMeta):
     def add(cls, value):
         """
         Add a new API to the enum.
-        Particulary useful for external APIs.
+        Used to register external APIs.
         """

         member_name = value.lower().replace("-", "_")
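For readers unfamiliar with the pattern, here is a minimal, self-contained sketch of what an `EnumMeta` subclass like `DynamicApiMeta` does. It is simplified: the real class also tracks `_dynamic_values` and exposes `add()`, and any name below that does not appear in the diff is illustrative.

```python
from enum import Enum, EnumMeta


class DynamicEnumMeta(EnumMeta):
    """Resolve unknown enum values by creating pseudo-members on the fly."""

    def __call__(cls, value, *args, **kwargs):
        try:
            return super().__call__(value, *args, **kwargs)
        except ValueError:
            # Derive a member name from the value, as in the diff above
            member_name = value.lower().replace("-", "_")
            if member_name in cls._member_map_:
                return cls._member_map_[member_name]
            # Create a pseudo-member without going through Enum.__new__
            member = object.__new__(cls)
            member._name_ = member_name
            member._value_ = value
            cls._member_map_[member_name] = member
            cls._value2member_map_[value] = member
            return member


class Api(Enum, metaclass=DynamicEnumMeta):
    inference = "inference"


weather = Api("weather")          # created on first lookup
assert Api("weather") is weather  # later lookups return the same member
```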


@@ -36,6 +36,7 @@ from llama_stack.distribution.datatypes import (
     StackRunConfig,
 )
 from llama_stack.distribution.distribution import get_provider_registry
+from llama_stack.distribution.external import load_external_apis
 from llama_stack.distribution.resolver import InvalidProviderError
 from llama_stack.distribution.stack import replace_env_vars
 from llama_stack.distribution.utils.config_dirs import DISTRIBS_BASE_DIR, EXTERNAL_PROVIDERS_DIR
@@ -390,6 +391,29 @@ def _run_stack_build_command_from_build_config(
         to_write = json.loads(build_config.model_dump_json())
         f.write(yaml.dump(to_write, sort_keys=False))

+    # We first install the external APIs so that the build process can use them and discover the
+    # provider dependencies
+    if build_config.external_apis_dir:
+        cprint("Installing external APIs", color="yellow", file=sys.stderr)
+        external_apis = load_external_apis(build_config)
+        if external_apis:
+            # install the external APIs
+            packages = []
+            for _, api_spec in external_apis.items():
+                if api_spec.pip_packages:
+                    packages.extend(api_spec.pip_packages)
+                    cprint(
+                        f"Installing {api_spec.name} with pip packages {api_spec.pip_packages}",
+                        color="yellow",
+                        file=sys.stderr,
+                    )
+            return_code = run_command(["uv", "pip", "install", *packages])
+            if return_code != 0:
+                packages_str = ", ".join(packages)
+                raise RuntimeError(
+                    f"Failed to install external APIs packages: {packages_str} (return code: {return_code})"
+                )
+
     return_code = build_image(
         build_config,
         build_file_path,


@@ -14,6 +14,7 @@ from termcolor import cprint

 from llama_stack.distribution.datatypes import BuildConfig
 from llama_stack.distribution.distribution import get_provider_registry
+from llama_stack.distribution.external import load_external_apis
 from llama_stack.distribution.utils.exec import run_command
 from llama_stack.distribution.utils.image_types import LlamaStackImageType
 from llama_stack.providers.datatypes import Api
@@ -105,6 +106,11 @@ def build_image(
     normal_deps, special_deps = get_provider_dependencies(build_config)
     normal_deps += SERVER_DEPENDENCIES
+    if build_config.external_apis_dir:
+        external_apis = load_external_apis(build_config)
+        if external_apis:
+            for _, api_spec in external_apis.items():
+                normal_deps.extend(api_spec.pip_packages)

     if build_config.image_type == LlamaStackImageType.CONTAINER.value:
         script = str(importlib.resources.files("llama_stack") / "distribution/build_container.sh")


@@ -325,6 +325,10 @@ class BuildConfig(BaseModel):
         default_factory=list,
         description="Additional pip packages to install in the distribution. These packages will be installed in the distribution environment.",
     )
+    external_apis_dir: Path | None = Field(
+        default=None,
+        description="Path to directory containing external API implementations. The APIs code and dependencies must be installed on the system.",
+    )

     @field_validator("external_providers_dir")
     @classmethod


@@ -152,10 +152,15 @@ def get_provider_registry(
         try:
             module = importlib.import_module(api_spec.module)
             registry[api] = {a.provider_type: a for a in module.available_providers()}
-        except ImportError as e:
-            raise ImportError(
-                f"Failed to import external API module {name}. Is the external API package installed? {e}"
-            ) from e
+        except (ImportError, AttributeError) as e:
+            # Populate the registry with an empty dict to avoid breaking the provider registry
+            # This assumes that the in-tree provider(s) are not available for this API, which means
+            # that users will need to use external providers for this API.
+            registry[api] = {}
+            logger.error(
+                f"Failed to import external API {name}: {e}. Could not populate the in-tree provider(s) registry for {api.name}. \n"
+                "Install the API package to load any in-tree providers for this API."
+            )

     # Check if config has the external_providers_dir attribute
     if config and hasattr(config, "external_providers_dir") and config.external_providers_dir:


@@ -1,3 +0,0 @@
-# Ollama external provider for Llama Stack
-
-Template code to create a new external provider for Llama Stack.


@ -1,7 +0,0 @@
adapter:
adapter_type: custom_ollama
pip_packages: ["ollama", "aiohttp", "tests/external-provider/llama-stack-provider-ollama"]
config_class: llama_stack_provider_ollama.config.OllamaImplConfig
module: llama_stack_provider_ollama
api_dependencies: []
optional_api_dependencies: []


@@ -1,43 +0,0 @@
-[project]
-dependencies = [
-  "llama-stack",
-  "pydantic",
-  "ollama",
-  "aiohttp",
-  "aiosqlite",
-  "autoevals",
-  "chardet",
-  "chromadb-client",
-  "datasets",
-  "faiss-cpu",
-  "fastapi",
-  "fire",
-  "httpx",
-  "matplotlib",
-  "mcp",
-  "nltk",
-  "numpy",
-  "openai",
-  "opentelemetry-exporter-otlp-proto-http",
-  "opentelemetry-sdk",
-  "pandas",
-  "pillow",
-  "psycopg2-binary",
-  "pymongo",
-  "pypdf",
-  "redis",
-  "requests",
-  "scikit-learn",
-  "scipy",
-  "sentencepiece",
-  "tqdm",
-  "transformers",
-  "tree_sitter",
-  "uvicorn",
-]
-
-name = "llama-stack-provider-ollama"
-version = "0.1.0"
-description = "External provider for Ollama using the Llama Stack API"
-readme = "README.md"
-requires-python = ">=3.12"


@@ -1,94 +0,0 @@
-version: '2'
-image_name: ollama
-apis:
-- inference
-- telemetry
-- tool_runtime
-- datasetio
-- vector_io
-providers:
-  inference:
-  - provider_id: custom_ollama
-    provider_type: remote::custom_ollama
-    config:
-      url: ${env.OLLAMA_URL:http://localhost:11434}
-  vector_io:
-  - provider_id: faiss
-    provider_type: inline::faiss
-    config:
-      kvstore:
-        type: sqlite
-        namespace: null
-        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/faiss_store.db
-  telemetry:
-  - provider_id: meta-reference
-    provider_type: inline::meta-reference
-    config:
-      service_name: "${env.OTEL_SERVICE_NAME:\u200B}"
-      sinks: ${env.TELEMETRY_SINKS:console,sqlite}
-      sqlite_db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/trace_store.db
-  datasetio:
-  - provider_id: huggingface
-    provider_type: remote::huggingface
-    config:
-      kvstore:
-        type: sqlite
-        namespace: null
-        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/huggingface_datasetio.db
-  - provider_id: localfs
-    provider_type: inline::localfs
-    config:
-      kvstore:
-        type: sqlite
-        namespace: null
-        db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/localfs_datasetio.db
-  tool_runtime:
-  - provider_id: brave-search
-    provider_type: remote::brave-search
-    config:
-      api_key: ${env.BRAVE_SEARCH_API_KEY:}
-      max_results: 3
-  - provider_id: tavily-search
-    provider_type: remote::tavily-search
-    config:
-      api_key: ${env.TAVILY_SEARCH_API_KEY:}
-      max_results: 3
-  - provider_id: rag-runtime
-    provider_type: inline::rag-runtime
-    config: {}
-  - provider_id: model-context-protocol
-    provider_type: remote::model-context-protocol
-    config: {}
-  - provider_id: wolfram-alpha
-    provider_type: remote::wolfram-alpha
-    config:
-      api_key: ${env.WOLFRAM_ALPHA_API_KEY:}
-metadata_store:
-  type: sqlite
-  db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/registry.db
-models:
-- metadata: {}
-  model_id: ${env.INFERENCE_MODEL}
-  provider_id: custom_ollama
-  model_type: llm
-- metadata:
-    embedding_dimension: 384
-  model_id: all-MiniLM-L6-v2
-  provider_id: custom_ollama
-  provider_model_id: all-minilm:latest
-  model_type: embedding
-shields: []
-vector_dbs: []
-datasets: []
-scoring_fns: []
-benchmarks: []
-tool_groups:
-- toolgroup_id: builtin::websearch
-  provider_id: tavily-search
-- toolgroup_id: builtin::rag
-  provider_id: rag-runtime
-- toolgroup_id: builtin::wolfram_alpha
-  provider_id: wolfram-alpha
-server:
-  port: 8321
-external_providers_dir: ~/.llama/providers.d


@@ -2,8 +2,9 @@ version: '2'
 distribution_spec:
   description: Custom distro for CI tests
   providers:
-    inference:
-    - remote::custom_ollama
-image_type: container
+    weather:
+    - remote::kaze
+image_type: venv
 image_name: ci-test
 external_providers_dir: ~/.llama/providers.d
+external_apis_dir: ~/.llama/apis.d

tests/external/kaze.yaml (vendored, new file, +6)

@@ -0,0 +1,6 @@
+adapter:
+  adapter_type: kaze
+  pip_packages: ["tests/external/llama-stack-provider-kaze"]
+  config_class: llama_stack_provider_kaze.config.KazeProviderConfig
+  module: llama_stack_provider_kaze
+optional_api_dependencies: []


@@ -0,0 +1,15 @@
+[project]
+name = "llama-stack-api-weather"
+version = "0.1.0"
+description = "Weather API for Llama Stack"
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = ["llama-stack", "pydantic"]
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools.packages.find]
+where = ["src"]
+include = ["llama_stack_api_weather", "llama_stack_api_weather.*"]


@@ -0,0 +1,11 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""Weather API for Llama Stack."""
+
+from .weather import WeatherProvider, available_providers
+
+__all__ = ["WeatherProvider", "available_providers"]


@@ -0,0 +1,39 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from typing import Protocol
+
+from llama_stack.providers.datatypes import AdapterSpec, Api, ProviderSpec, RemoteProviderSpec
+from llama_stack.schema_utils import webmethod
+
+
+def available_providers() -> list[ProviderSpec]:
+    return [
+        RemoteProviderSpec(
+            api=Api.weather,
+            provider_type="remote::kaze",
+            config_class="llama_stack_provider_kaze.KazeProviderConfig",
+            adapter=AdapterSpec(
+                adapter_type="kaze",
+                module="llama_stack_provider_kaze",
+                pip_packages=["llama_stack_provider_kaze"],
+                config_class="llama_stack_provider_kaze.KazeProviderConfig",
+            ),
+        ),
+    ]
+
+
+class WeatherProvider(Protocol):
+    """
+    A protocol for the Weather API.
+    """
+
+    @webmethod(route="/weather/locations", method="GET")
+    async def get_available_locations() -> dict[str, list[str]]:
+        """
+        Get the available locations.
+        """
+        ...
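Once a distro exposing this API is running, the route declared by `@webmethod` can be exercised directly. A small sketch using aiohttp (already a dependency of the kaze provider), assuming the default port 8321 and the `/v1` prefix used in the CI workflow:

```python
import asyncio

import aiohttp


async def main() -> None:
    # Query the route declared by @webmethod(route="/weather/locations")
    async with aiohttp.ClientSession() as session:
        async with session.get("http://localhost:8321/v1/weather/locations") as resp:
            resp.raise_for_status()
            print(await resp.json())  # expected: {"locations": ["Paris", "Tokyo"]}


asyncio.run(main())
```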


@@ -0,0 +1,15 @@
+[project]
+name = "llama-stack-provider-kaze"
+version = "0.1.0"
+description = "Kaze weather provider for Llama Stack"
+readme = "README.md"
+requires-python = ">=3.10"
+dependencies = ["llama-stack", "pydantic", "aiohttp"]
+
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[tool.setuptools.packages.find]
+where = ["src"]
+include = ["llama_stack_provider_kaze", "llama_stack_provider_kaze.*"]


@@ -0,0 +1,20 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+"""Kaze weather provider for Llama Stack."""
+
+from .config import KazeProviderConfig
+from .kaze import WeatherKazeAdapter
+
+__all__ = ["KazeProviderConfig", "WeatherKazeAdapter"]
+
+
+async def get_adapter_impl(config: KazeProviderConfig, _deps):
+    from .kaze import WeatherKazeAdapter
+
+    impl = WeatherKazeAdapter(config)
+    await impl.initialize()
+    return impl


@@ -0,0 +1,11 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from pydantic import BaseModel
+
+
+class KazeProviderConfig(BaseModel):
+    """Configuration for the Kaze weather provider."""


@@ -0,0 +1,26 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from llama_stack_api_weather.weather import WeatherProvider
+
+from .config import KazeProviderConfig
+
+
+class WeatherKazeAdapter(WeatherProvider):
+    """Kaze weather provider implementation."""
+
+    def __init__(
+        self,
+        config: KazeProviderConfig,
+    ) -> None:
+        self.config = config
+
+    async def initialize(self) -> None:
+        pass
+
+    async def get_available_locations(self) -> dict[str, list[str]]:
+        """Get available weather locations."""
+        return {"locations": ["Paris", "Tokyo"]}

tests/external/run-byoa.yaml (vendored, new file, +13)

@@ -0,0 +1,13 @@
+version: "2"
+image_name: "llama-stack-api-weather"
+apis:
+- weather
+providers:
+  weather:
+  - provider_id: kaze
+    provider_type: remote::kaze
+    config: {}
+external_apis_dir: ~/.llama/apis.d
+external_providers_dir: ~/.llama/providers.d
+server:
+  port: 8321

tests/external/weather.yaml (vendored, new file, +4)

@@ -0,0 +1,4 @@
+module: llama_stack_api_weather
+name: weather
+pip_packages: ["tests/external/llama-stack-api-weather"]
+protocol: WeatherProvider
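For orientation, a spec like this plausibly resolves to the `WeatherProvider` protocol roughly as sketched below. The real logic lives in `load_external_apis` (imported from `llama_stack.distribution.external` elsewhere in this commit), so the loading code here is an assumption based only on the fields in this file:

```python
import importlib

import yaml

# Hypothetical sketch of resolving an external API spec; field names come
# from weather.yaml above, the resolution logic itself is assumed.
with open("tests/external/weather.yaml") as f:
    spec = yaml.safe_load(f)

module = importlib.import_module(spec["module"])  # llama_stack_api_weather
protocol = getattr(module, spec["protocol"])      # the WeatherProvider Protocol
print(f"Loaded external API {spec['name']!r}: {protocol.__name__}")
```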