From e665b652c6d6a22eed1cf25e2ff2e05d0bb2448b Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Sat, 14 Sep 2024 14:28:16 -0700
Subject: [PATCH] example build files

---
 .../conda/local-fireworks-conda-example-build.yaml | 12 ++++++++++++
 .../conda/local-ollama-conda-example-build.yaml    | 12 ++++++++++++
 .../conda/local-tgi-conda-example-build.yaml       | 12 ++++++++++++
 3 files changed, 36 insertions(+)
 create mode 100644 llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml
 create mode 100644 llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml
 create mode 100644 llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml

diff --git a/llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml b/llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml
new file mode 100644
index 000000000..bb9c87c25
--- /dev/null
+++ b/llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml
@@ -0,0 +1,12 @@
+name: local-fireworks-conda-example
+distribution_spec:
+  distribution_type: local
+  description: Use code from `llama_toolchain` itself to serve all llama stack APIs
+  docker_image: null
+  providers:
+    inference: remote::fireworks
+    memory: meta-reference-faiss
+    safety: meta-reference
+    agentic_system: meta-reference
+    telemetry: console
+image_type: conda
diff --git a/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml b/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml
new file mode 100644
index 000000000..1016c24d4
--- /dev/null
+++ b/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml
@@ -0,0 +1,12 @@
+name: local-ollama-conda-example
+distribution_spec:
+  distribution_type: local
+  description: Use code from `llama_toolchain` itself to serve all llama stack APIs
+  docker_image: null
+  providers:
+    inference: remote::ollama
+    memory: meta-reference-faiss
+    safety: meta-reference
+    agentic_system: meta-reference
+    telemetry: console
+image_type: conda
diff --git a/llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml b/llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml
new file mode 100644
index 000000000..2d31e2c14
--- /dev/null
+++ b/llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml
@@ -0,0 +1,12 @@
+name: local-tgi-conda-example
+distribution_spec:
+  distribution_type: local
+  description: Use code from `llama_toolchain` itself to serve all llama stack APIs
+  docker_image: null
+  providers:
+    inference: remote::tgi
+    memory: meta-reference-faiss
+    safety: meta-reference
+    agentic_system: meta-reference
+    telemetry: console
+image_type: conda
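
Note (reviewer sketch, not part of the patch): the three example build files share the same shape — a top-level `name` and `image_type: conda`, plus a `distribution_spec` whose `providers` map picks the backend per API (only `inference` differs between fireworks, ollama, and tgi). A minimal Python sketch of how such a file could be loaded and inspected, assuming PyYAML is installed and the repository-relative path added in this commit:

```python
# Hypothetical sketch: load one of the example build files above and print
# its provider selections. Assumes PyYAML and the path added in this commit.
import yaml

BUILD_FILE = (
    "llama_toolchain/configs/distributions/conda/"
    "local-ollama-conda-example-build.yaml"
)

with open(BUILD_FILE) as f:
    build_config = yaml.safe_load(f)

spec = build_config["distribution_spec"]
print(f"name:       {build_config['name']}")        # local-ollama-conda-example
print(f"image_type: {build_config['image_type']}")  # conda
for api, provider in spec["providers"].items():
    print(f"  {api}: {provider}")                   # e.g. inference: remote::ollama
```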