From f3297c41663b128b69af60a426d2da81977500ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Han?= Date: Thu, 12 Jun 2025 16:39:08 +0200 Subject: [PATCH] ci: tests MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Sébastien Han --- ...ternal-providers.yml => test-external.yml} | 44 ++++----- docs/source/apis/external.md | 2 +- llama_stack/apis/datatypes.py | 10 +- llama_stack/cli/stack/_build.py | 24 +++++ llama_stack/distribution/build.py | 6 ++ llama_stack/distribution/datatypes.py | 4 + llama_stack/distribution/distribution.py | 13 ++- .../llama-stack-provider-ollama/README.md | 3 - .../custom_ollama.yaml | 7 -- .../pyproject.toml | 43 --------- .../llama-stack-provider-ollama/run.yaml | 94 ------------------- .../build.yaml} | 7 +- tests/external/kaze.yaml | 6 ++ .../llama-stack-api-weather/pyproject.toml | 15 +++ .../src/llama_stack_api_weather/__init__.py | 11 +++ .../src/llama_stack_api_weather/weather.py | 39 ++++++++ .../llama-stack-provider-kaze/pyproject.toml | 15 +++ .../src/llama_stack_provider_kaze/__init__.py | 20 ++++ .../src/llama_stack_provider_kaze/config.py | 11 +++ .../src/llama_stack_provider_kaze/kaze.py | 26 +++++ tests/external/run-byoa.yaml | 13 +++ tests/external/weather.yaml | 4 + 22 files changed, 236 insertions(+), 181 deletions(-) rename .github/workflows/{test-external-providers.yml => test-external.yml} (50%) delete mode 100644 tests/external-provider/llama-stack-provider-ollama/README.md delete mode 100644 tests/external-provider/llama-stack-provider-ollama/custom_ollama.yaml delete mode 100644 tests/external-provider/llama-stack-provider-ollama/pyproject.toml delete mode 100644 tests/external-provider/llama-stack-provider-ollama/run.yaml rename tests/{external-provider/llama-stack-provider-ollama/custom-distro.yaml => external/build.yaml} (64%) create mode 100644 tests/external/kaze.yaml create mode 100644 tests/external/llama-stack-api-weather/pyproject.toml create 
mode 100644 tests/external/llama-stack-api-weather/src/llama_stack_api_weather/__init__.py create mode 100644 tests/external/llama-stack-api-weather/src/llama_stack_api_weather/weather.py create mode 100644 tests/external/llama-stack-provider-kaze/pyproject.toml create mode 100644 tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/__init__.py create mode 100644 tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/config.py create mode 100644 tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/kaze.py create mode 100644 tests/external/run-byoa.yaml create mode 100644 tests/external/weather.yaml diff --git a/.github/workflows/test-external-providers.yml b/.github/workflows/test-external.yml similarity index 50% rename from .github/workflows/test-external-providers.yml rename to .github/workflows/test-external.yml index cdf18fab7..d4b222e70 100644 --- a/.github/workflows/test-external-providers.yml +++ b/.github/workflows/test-external.yml @@ -1,4 +1,4 @@ -name: Test External Providers +name: Test External API and Providers on: push: @@ -11,10 +11,10 @@ on: - 'uv.lock' - 'pyproject.toml' - 'requirements.txt' - - '.github/workflows/test-external-providers.yml' # This workflow + - '.github/workflows/test-external.yml' # This workflow jobs: - test-external-providers: + test-external: runs-on: ubuntu-latest strategy: matrix: @@ -28,24 +28,23 @@ jobs: - name: Install dependencies uses: ./.github/actions/setup-runner - - name: Apply image type to config file + - name: Create API configuration run: | - yq -i '.image_type = "${{ matrix.image-type }}"' tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml - cat tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml - - - name: Setup directory for Ollama custom provider - run: | - mkdir -p tests/external-provider/llama-stack-provider-ollama/src/ - cp -a llama_stack/providers/remote/inference/ollama/ 
tests/external-provider/llama-stack-provider-ollama/src/llama_stack_provider_ollama + mkdir -p /home/runner/.llama/apis.d + cp tests/external/weather.yaml /home/runner/.llama/apis.d/weather.yaml - name: Create provider configuration run: | - mkdir -p /home/runner/.llama/providers.d/remote/inference - cp tests/external-provider/llama-stack-provider-ollama/custom_ollama.yaml /home/runner/.llama/providers.d/remote/inference/custom_ollama.yaml + mkdir -p /home/runner/.llama/providers.d/remote/weather + cp tests/external/kaze.yaml /home/runner/.llama/providers.d/remote/weather/kaze.yaml + + - name: Print distro dependencies + run: | + USE_COPY_NOT_MOUNT=true LLAMA_STACK_DIR=. llama stack build --config tests/external/build.yaml --print-deps-only - name: Build distro from config file run: | - USE_COPY_NOT_MOUNT=true LLAMA_STACK_DIR=. llama stack build --config tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml + USE_COPY_NOT_MOUNT=true LLAMA_STACK_DIR=. llama stack build --config tests/external/build.yaml - name: Start Llama Stack server in background if: ${{ matrix.image-type }} == 'venv' @@ -55,19 +54,22 @@ jobs: # Use the virtual environment created by the build step (name comes from build config) source ci-test/bin/activate uv pip list - nohup llama stack run tests/external-provider/llama-stack-provider-ollama/run.yaml --image-type ${{ matrix.image-type }} > server.log 2>&1 & + nohup llama stack run tests/external/run-byoa.yaml --image-type ${{ matrix.image-type }} > server.log 2>&1 & - name: Wait for Llama Stack server to be ready run: | + echo "Waiting for Llama Stack server..." for i in {1..30}; do - if ! grep -q "Successfully loaded external provider remote::custom_ollama" server.log; then - echo "Waiting for Llama Stack server to load the provider..." - sleep 1 - else - echo "Provider loaded" + if curl -sSf http://localhost:8321/v1/health | grep -q "OK"; then + echo "Llama Stack server is up!" 
exit 0 fi + sleep 1 done - echo "Provider failed to load" + echo "Llama Stack server failed to start" cat server.log exit 1 + + - name: Test external API + run: | + curl -sSf http://localhost:8321/v1/weather/locations diff --git a/docs/source/apis/external.md b/docs/source/apis/external.md index a1fe70bdd..025267c33 100644 --- a/docs/source/apis/external.md +++ b/docs/source/apis/external.md @@ -361,7 +361,7 @@ python -m llama_stack.distribution.server.server --yaml-config ~/.llama/run-byoa 9. Test the API: ```bash -curl -s http://127.0.0.1:8321/v1/weather/locations +curl -sSf http://127.0.0.1:8321/v1/weather/locations {"locations":["Paris","Tokyo"]}% ``` diff --git a/llama_stack/apis/datatypes.py b/llama_stack/apis/datatypes.py index 6c4ebb449..e6628f5d7 100644 --- a/llama_stack/apis/datatypes.py +++ b/llama_stack/apis/datatypes.py @@ -30,14 +30,14 @@ class DynamicApiMeta(EnumMeta): try: return super().__call__(value) except ValueError as e: - # If the value doesn't exist, create a new enum member - # Create a new member name from the value - member_name = value.lower().replace("-", "_") - # If this value was already dynamically added, return it if value in cls._dynamic_values: return cls._dynamic_values[value] + # If the value doesn't exist, create a new enum member + # Create a new member name from the value + member_name = value.lower().replace("-", "_") + # If this member name already exists in the enum, return the existing member if member_name in cls._member_map_: return cls._member_map_[member_name] @@ -55,7 +55,7 @@ class DynamicApiMeta(EnumMeta): def add(cls, value): """ Add a new API to the enum. - Particulary useful for external APIs. + Used to register external APIs. 
""" member_name = value.lower().replace("-", "_") diff --git a/llama_stack/cli/stack/_build.py b/llama_stack/cli/stack/_build.py index 7ade6f17a..464accc0c 100644 --- a/llama_stack/cli/stack/_build.py +++ b/llama_stack/cli/stack/_build.py @@ -36,6 +36,7 @@ from llama_stack.distribution.datatypes import ( StackRunConfig, ) from llama_stack.distribution.distribution import get_provider_registry +from llama_stack.distribution.external import load_external_apis from llama_stack.distribution.resolver import InvalidProviderError from llama_stack.distribution.stack import replace_env_vars from llama_stack.distribution.utils.config_dirs import DISTRIBS_BASE_DIR, EXTERNAL_PROVIDERS_DIR @@ -390,6 +391,29 @@ def _run_stack_build_command_from_build_config( to_write = json.loads(build_config.model_dump_json()) f.write(yaml.dump(to_write, sort_keys=False)) + # We first install the external APIs so that the build process can use them and discover the + # providers dependencies + if build_config.external_apis_dir: + cprint("Installing external APIs", color="yellow", file=sys.stderr) + external_apis = load_external_apis(build_config) + if external_apis: + # install the external APIs + packages = [] + for _, api_spec in external_apis.items(): + if api_spec.pip_packages: + packages.extend(api_spec.pip_packages) + cprint( + f"Installing {api_spec.name} with pip packages {api_spec.pip_packages}", + color="yellow", + file=sys.stderr, + ) + return_code = run_command(["uv", "pip", "install", *packages]) + if return_code != 0: + packages_str = ", ".join(packages) + raise RuntimeError( + f"Failed to install external APIs packages: {packages_str} (return code: {return_code})" + ) + return_code = build_image( build_config, build_file_path, diff --git a/llama_stack/distribution/build.py b/llama_stack/distribution/build.py index 699ed72da..819bf4e94 100644 --- a/llama_stack/distribution/build.py +++ b/llama_stack/distribution/build.py @@ -14,6 +14,7 @@ from termcolor import cprint from 
llama_stack.distribution.datatypes import BuildConfig from llama_stack.distribution.distribution import get_provider_registry +from llama_stack.distribution.external import load_external_apis from llama_stack.distribution.utils.exec import run_command from llama_stack.distribution.utils.image_types import LlamaStackImageType from llama_stack.providers.datatypes import Api @@ -105,6 +106,11 @@ def build_image( normal_deps, special_deps = get_provider_dependencies(build_config) normal_deps += SERVER_DEPENDENCIES + if build_config.external_apis_dir: + external_apis = load_external_apis(build_config) + if external_apis: + for _, api_spec in external_apis.items(): + normal_deps.extend(api_spec.pip_packages) if build_config.image_type == LlamaStackImageType.CONTAINER.value: script = str(importlib.resources.files("llama_stack") / "distribution/build_container.sh") diff --git a/llama_stack/distribution/datatypes.py b/llama_stack/distribution/datatypes.py index 5b4eb9733..99539084a 100644 --- a/llama_stack/distribution/datatypes.py +++ b/llama_stack/distribution/datatypes.py @@ -325,6 +325,10 @@ class BuildConfig(BaseModel): default_factory=list, description="Additional pip packages to install in the distribution. These packages will be installed in the distribution environment.", ) + external_apis_dir: Path | None = Field( + default=None, + description="Path to directory containing external API implementations. 
The APIs code and dependencies must be installed on the system.", +    ) @field_validator("external_providers_dir") @classmethod diff --git a/llama_stack/distribution/distribution.py b/llama_stack/distribution/distribution.py index 1280b1d42..929e11286 100644 --- a/llama_stack/distribution/distribution.py +++ b/llama_stack/distribution/distribution.py @@ -152,10 +152,15 @@ def get_provider_registry( try: module = importlib.import_module(api_spec.module) registry[api] = {a.provider_type: a for a in module.available_providers()} - except ImportError as e: - raise ImportError( - f"Failed to import external API module {name}. Is the external API package installed? {e}" - ) from e + except (ImportError, AttributeError) as e: + # Populate the registry with an empty dict to avoid breaking the provider registry + # This assumes that the in-tree provider(s) are not available for this API which means + # that users will need to use external providers for this API. + registry[api] = {} + logger.error( + f"Failed to import external API {name}: {e}. Could not populate the in-tree provider(s) registry for {api.name}. \n" + "Install the API package to load any in-tree providers for this API." + ) # Check if config has the external_providers_dir attribute if config and hasattr(config, "external_providers_dir") and config.external_providers_dir: diff --git a/tests/external-provider/llama-stack-provider-ollama/README.md b/tests/external-provider/llama-stack-provider-ollama/README.md deleted file mode 100644 index 8bd2b6a87..000000000 --- a/tests/external-provider/llama-stack-provider-ollama/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Ollama external provider for Llama Stack - -Template code to create a new external provider for Llama Stack.
diff --git a/tests/external-provider/llama-stack-provider-ollama/custom_ollama.yaml b/tests/external-provider/llama-stack-provider-ollama/custom_ollama.yaml deleted file mode 100644 index 2ae1e2cf3..000000000 --- a/tests/external-provider/llama-stack-provider-ollama/custom_ollama.yaml +++ /dev/null @@ -1,7 +0,0 @@ -adapter: - adapter_type: custom_ollama - pip_packages: ["ollama", "aiohttp", "tests/external-provider/llama-stack-provider-ollama"] - config_class: llama_stack_provider_ollama.config.OllamaImplConfig - module: llama_stack_provider_ollama -api_dependencies: [] -optional_api_dependencies: [] diff --git a/tests/external-provider/llama-stack-provider-ollama/pyproject.toml b/tests/external-provider/llama-stack-provider-ollama/pyproject.toml deleted file mode 100644 index ca1fecc42..000000000 --- a/tests/external-provider/llama-stack-provider-ollama/pyproject.toml +++ /dev/null @@ -1,43 +0,0 @@ -[project] -dependencies = [ - "llama-stack", - "pydantic", - "ollama", - "aiohttp", - "aiosqlite", - "autoevals", - "chardet", - "chromadb-client", - "datasets", - "faiss-cpu", - "fastapi", - "fire", - "httpx", - "matplotlib", - "mcp", - "nltk", - "numpy", - "openai", - "opentelemetry-exporter-otlp-proto-http", - "opentelemetry-sdk", - "pandas", - "pillow", - "psycopg2-binary", - "pymongo", - "pypdf", - "redis", - "requests", - "scikit-learn", - "scipy", - "sentencepiece", - "tqdm", - "transformers", - "tree_sitter", - "uvicorn", -] - -name = "llama-stack-provider-ollama" -version = "0.1.0" -description = "External provider for Ollama using the Llama Stack API" -readme = "README.md" -requires-python = ">=3.12" diff --git a/tests/external-provider/llama-stack-provider-ollama/run.yaml b/tests/external-provider/llama-stack-provider-ollama/run.yaml deleted file mode 100644 index 158f6800f..000000000 --- a/tests/external-provider/llama-stack-provider-ollama/run.yaml +++ /dev/null @@ -1,94 +0,0 @@ -version: '2' -image_name: ollama -apis: -- inference -- telemetry -- 
tool_runtime -- datasetio -- vector_io -providers: - inference: - - provider_id: custom_ollama - provider_type: remote::custom_ollama - config: - url: ${env.OLLAMA_URL:http://localhost:11434} - vector_io: - - provider_id: faiss - provider_type: inline::faiss - config: - kvstore: - type: sqlite - namespace: null - db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/faiss_store.db - telemetry: - - provider_id: meta-reference - provider_type: inline::meta-reference - config: - service_name: "${env.OTEL_SERVICE_NAME:\u200B}" - sinks: ${env.TELEMETRY_SINKS:console,sqlite} - sqlite_db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/trace_store.db - datasetio: - - provider_id: huggingface - provider_type: remote::huggingface - config: - kvstore: - type: sqlite - namespace: null - db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/huggingface_datasetio.db - - provider_id: localfs - provider_type: inline::localfs - config: - kvstore: - type: sqlite - namespace: null - db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/localfs_datasetio.db - tool_runtime: - - provider_id: brave-search - provider_type: remote::brave-search - config: - api_key: ${env.BRAVE_SEARCH_API_KEY:} - max_results: 3 - - provider_id: tavily-search - provider_type: remote::tavily-search - config: - api_key: ${env.TAVILY_SEARCH_API_KEY:} - max_results: 3 - - provider_id: rag-runtime - provider_type: inline::rag-runtime - config: {} - - provider_id: model-context-protocol - provider_type: remote::model-context-protocol - config: {} - - provider_id: wolfram-alpha - provider_type: remote::wolfram-alpha - config: - api_key: ${env.WOLFRAM_ALPHA_API_KEY:} -metadata_store: - type: sqlite - db_path: ${env.SQLITE_STORE_DIR:~/.llama/distributions/ollama}/registry.db -models: -- metadata: {} - model_id: ${env.INFERENCE_MODEL} - provider_id: custom_ollama - model_type: llm -- metadata: - embedding_dimension: 384 - model_id: all-MiniLM-L6-v2 - provider_id: custom_ollama - 
provider_model_id: all-minilm:latest - model_type: embedding -shields: [] -vector_dbs: [] -datasets: [] -scoring_fns: [] -benchmarks: [] -tool_groups: -- toolgroup_id: builtin::websearch - provider_id: tavily-search -- toolgroup_id: builtin::rag - provider_id: rag-runtime -- toolgroup_id: builtin::wolfram_alpha - provider_id: wolfram-alpha -server: - port: 8321 -external_providers_dir: ~/.llama/providers.d diff --git a/tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml b/tests/external/build.yaml similarity index 64% rename from tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml rename to tests/external/build.yaml index 1f3ab3817..90dcc97aa 100644 --- a/tests/external-provider/llama-stack-provider-ollama/custom-distro.yaml +++ b/tests/external/build.yaml @@ -2,8 +2,9 @@ version: '2' distribution_spec: description: Custom distro for CI tests providers: - inference: - - remote::custom_ollama -image_type: container + weather: + - remote::kaze +image_type: venv image_name: ci-test external_providers_dir: ~/.llama/providers.d +external_apis_dir: ~/.llama/apis.d diff --git a/tests/external/kaze.yaml b/tests/external/kaze.yaml new file mode 100644 index 000000000..c61ac0e31 --- /dev/null +++ b/tests/external/kaze.yaml @@ -0,0 +1,6 @@ +adapter: + adapter_type: kaze + pip_packages: ["tests/external/llama-stack-provider-kaze"] + config_class: llama_stack_provider_kaze.config.KazeProviderConfig + module: llama_stack_provider_kaze +optional_api_dependencies: [] diff --git a/tests/external/llama-stack-api-weather/pyproject.toml b/tests/external/llama-stack-api-weather/pyproject.toml new file mode 100644 index 000000000..566e1e9aa --- /dev/null +++ b/tests/external/llama-stack-api-weather/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "llama-stack-api-weather" +version = "0.1.0" +description = "Weather API for Llama Stack" +readme = "README.md" +requires-python = ">=3.10" +dependencies = ["llama-stack", "pydantic"] + +[build-system] 
+requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +where = ["src"] +include = ["llama_stack_api_weather", "llama_stack_api_weather.*"] diff --git a/tests/external/llama-stack-api-weather/src/llama_stack_api_weather/__init__.py b/tests/external/llama-stack-api-weather/src/llama_stack_api_weather/__init__.py new file mode 100644 index 000000000..d0227615d --- /dev/null +++ b/tests/external/llama-stack-api-weather/src/llama_stack_api_weather/__init__.py @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +"""Weather API for Llama Stack.""" + +from .weather import WeatherProvider, available_providers + +__all__ = ["WeatherProvider", "available_providers"] diff --git a/tests/external/llama-stack-api-weather/src/llama_stack_api_weather/weather.py b/tests/external/llama-stack-api-weather/src/llama_stack_api_weather/weather.py new file mode 100644 index 000000000..4b3bfb641 --- /dev/null +++ b/tests/external/llama-stack-api-weather/src/llama_stack_api_weather/weather.py @@ -0,0 +1,39 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
+ +from typing import Protocol + +from llama_stack.providers.datatypes import AdapterSpec, Api, ProviderSpec, RemoteProviderSpec +from llama_stack.schema_utils import webmethod + + +def available_providers() -> list[ProviderSpec]: + return [ + RemoteProviderSpec( + api=Api.weather, + provider_type="remote::kaze", + config_class="llama_stack_provider_kaze.KazeProviderConfig", + adapter=AdapterSpec( + adapter_type="kaze", + module="llama_stack_provider_kaze", + pip_packages=["llama_stack_provider_kaze"], + config_class="llama_stack_provider_kaze.KazeProviderConfig", + ), + ), + ] + + +class WeatherProvider(Protocol): + """ + A protocol for the Weather API. + """ + + @webmethod(route="/weather/locations", method="GET") + async def get_available_locations() -> dict[str, list[str]]: + """ + Get the available locations. + """ + ... diff --git a/tests/external/llama-stack-provider-kaze/pyproject.toml b/tests/external/llama-stack-provider-kaze/pyproject.toml new file mode 100644 index 000000000..7bbf1f843 --- /dev/null +++ b/tests/external/llama-stack-provider-kaze/pyproject.toml @@ -0,0 +1,15 @@ +[project] +name = "llama-stack-provider-kaze" +version = "0.1.0" +description = "Kaze weather provider for Llama Stack" +readme = "README.md" +requires-python = ">=3.10" +dependencies = ["llama-stack", "pydantic", "aiohttp"] + +[build-system] +requires = ["setuptools"] +build-backend = "setuptools.build_meta" + +[tool.setuptools.packages.find] +where = ["src"] +include = ["llama_stack_provider_kaze", "llama_stack_provider_kaze.*"] diff --git a/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/__init__.py b/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/__init__.py new file mode 100644 index 000000000..581ff38c7 --- /dev/null +++ b/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/__init__.py @@ -0,0 +1,20 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. 
+# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +"""Kaze weather provider for Llama Stack.""" + +from .config import KazeProviderConfig +from .kaze import WeatherKazeAdapter + +__all__ = ["KazeProviderConfig", "WeatherKazeAdapter"] + + +async def get_adapter_impl(config: KazeProviderConfig, _deps): + from .kaze import WeatherKazeAdapter + + impl = WeatherKazeAdapter(config) + await impl.initialize() + return impl diff --git a/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/config.py b/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/config.py new file mode 100644 index 000000000..4b82698ed --- /dev/null +++ b/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/config.py @@ -0,0 +1,11 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. + +from pydantic import BaseModel + + +class KazeProviderConfig(BaseModel): + """Configuration for the Kaze weather provider.""" diff --git a/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/kaze.py b/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/kaze.py new file mode 100644 index 000000000..120b5438d --- /dev/null +++ b/tests/external/llama-stack-provider-kaze/src/llama_stack_provider_kaze/kaze.py @@ -0,0 +1,26 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. +# +# This source code is licensed under the terms described in the LICENSE file in +# the root directory of this source tree. 
+ +from llama_stack_api_weather.weather import WeatherProvider + +from .config import KazeProviderConfig + + +class WeatherKazeAdapter(WeatherProvider): + """Kaze weather provider implementation.""" + + def __init__( + self, + config: KazeProviderConfig, + ) -> None: + self.config = config + + async def initialize(self) -> None: + pass + + async def get_available_locations(self) -> dict[str, list[str]]: + """Get available weather locations.""" + return {"locations": ["Paris", "Tokyo"]} diff --git a/tests/external/run-byoa.yaml b/tests/external/run-byoa.yaml new file mode 100644 index 000000000..5774ae9da --- /dev/null +++ b/tests/external/run-byoa.yaml @@ -0,0 +1,13 @@ +version: "2" +image_name: "llama-stack-api-weather" +apis: + - weather +providers: + weather: + - provider_id: kaze + provider_type: remote::kaze + config: {} +external_apis_dir: ~/.llama/apis.d +external_providers_dir: ~/.llama/providers.d +server: + port: 8321 diff --git a/tests/external/weather.yaml b/tests/external/weather.yaml new file mode 100644 index 000000000..a84fcc921 --- /dev/null +++ b/tests/external/weather.yaml @@ -0,0 +1,4 @@ +module: llama_stack_api_weather +name: weather +pip_packages: ["tests/external/llama-stack-api-weather"] +protocol: WeatherProvider