From 1855fd2835c8c4816a63908507d2f79f1eebef1c Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Sat, 14 Sep 2024 14:33:19 -0700
Subject: [PATCH] update example build files

---
 .../conda/local-fireworks-conda-example-build.yaml |  4 ++--
 .../conda/local-ollama-conda-example-build.yaml    |  4 ++--
 .../conda/local-tgi-conda-example-build.yaml       |  4 ++--
 .../conda/local-together-conda-example-build.yaml  | 12 ++++++++++++
 4 files changed, 18 insertions(+), 6 deletions(-)
 create mode 100644 llama_toolchain/configs/distributions/conda/local-together-conda-example-build.yaml

diff --git a/llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml b/llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml
index bb9c87c25..4531fcebb 100644
--- a/llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml
+++ b/llama_toolchain/configs/distributions/conda/local-fireworks-conda-example-build.yaml
@@ -1,7 +1,7 @@
 name: local-fireworks-conda-example
 distribution_spec:
-  distribution_type: local
-  description: Use code from `llama_toolchain` itself to serve all llama stack APIs
+  distribution_type: local-plus-fireworks-inference
+  description: Use Fireworks.ai for running LLM inference
   docker_image: null
   providers:
     inference: remote::fireworks
diff --git a/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml b/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml
index 1016c24d4..dbc2c7936 100644
--- a/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml
+++ b/llama_toolchain/configs/distributions/conda/local-ollama-conda-example-build.yaml
@@ -1,7 +1,7 @@
 name: local-ollama-conda-example
 distribution_spec:
-  distribution_type: local
-  description: Use code from `llama_toolchain` itself to serve all llama stack APIs
+  distribution_type: local-plus-ollama-inference
+  description: Like local, but use ollama for running LLM inference
   docker_image: null
   providers:
     inference: remote::ollama
diff --git a/llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml b/llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml
index 2d31e2c14..48b426385 100644
--- a/llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml
+++ b/llama_toolchain/configs/distributions/conda/local-tgi-conda-example-build.yaml
@@ -1,7 +1,7 @@
 name: local-tgi-conda-example
 distribution_spec:
-  distribution_type: local
-  description: Use code from `llama_toolchain` itself to serve all llama stack APIs
+  distribution_type: local-plus-tgi-inference
+  description: Use TGI for running LLM inference
   docker_image: null
   providers:
     inference: remote::tgi
diff --git a/llama_toolchain/configs/distributions/conda/local-together-conda-example-build.yaml b/llama_toolchain/configs/distributions/conda/local-together-conda-example-build.yaml
new file mode 100644
index 000000000..0d69369ef
--- /dev/null
+++ b/llama_toolchain/configs/distributions/conda/local-together-conda-example-build.yaml
@@ -0,0 +1,12 @@
+name: local-together-conda-example
+distribution_spec:
+  distribution_type: local-plus-together-inference
+  description: Use Together.ai for running LLM inference
+  docker_image: null
+  providers:
+    inference: remote::together
+    memory: meta-reference-faiss
+    safety: meta-reference
+    agentic_system: meta-reference
+    telemetry: console
+image_type: conda