Rename the "inline" distribution to "local"

Ashwin Bharambe 2024-08-08 14:05:49 -07:00
parent dd15671f7f
commit 640741c639
5 changed files with 18 additions and 32 deletions

@@ -96,7 +96,7 @@ ensure_conda_env_python310() {
 if [ "$#" -ne 3 ]; then
   echo "Usage: $0 <environment_name> <distribution_name> <pip_dependencies>" >&2
-  echo "Example: $0 my_env local-inline 'numpy pandas scipy'" >&2
+  echo "Example: $0 my_env local-llama-8b 'numpy pandas scipy'" >&2
   exit 1
 fi
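
For reference, the script above takes the distribution name as its second argument, so callers have to switch to the new names introduced by this commit. Below is a minimal sketch of driving the script from Python, mirroring the updated usage example; the script path is an assumption, since this commit view does not show file names.

    import subprocess

    # Hypothetical path: the commit view above does not show the script's file name.
    SCRIPT = "llama_toolchain/distribution/install_distribution.sh"

    # Mirrors the updated usage string:
    #   $0 <environment_name> <distribution_name> <pip_dependencies>
    subprocess.run(
        ["bash", SCRIPT, "my_env", "local-llama-8b", "numpy pandas scipy"],
        check=True,
    )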

@@ -28,7 +28,7 @@ def available_distribution_specs() -> List[DistributionSpec]:
     providers = api_providers()
     return [
         DistributionSpec(
-            spec_id="inline",
+            spec_id="local",
             description="Use code from `llama_toolchain` itself to serve all llama stack APIs",
             provider_specs={
                 Api.inference: providers[Api.inference]["meta-reference"],
@@ -42,8 +42,8 @@ def available_distribution_specs() -> List[DistributionSpec]:
             provider_specs={x: remote_spec(x) for x in providers},
         ),
         DistributionSpec(
-            spec_id="ollama-inline",
-            description="Like local-source, but use ollama for running LLM inference",
+            spec_id="local-ollama",
+            description="Like local, but use ollama for running LLM inference",
             provider_specs={
                 Api.inference: providers[Api.inference]["meta-ollama"],
                 Api.safety: providers[Api.safety]["meta-reference"],
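
To illustrate what the rename means for callers of available_distribution_specs() shown above, here is a minimal, self-contained sketch (not part of this commit) that resolves a spec by id; after this change the ids visible in this file are "local" and "local-ollama", while the old "inline" and "ollama-inline" no longer match anything.

    from typing import Optional, Sequence

    def resolve_distribution_spec(spec_id: str, specs: Sequence) -> Optional[object]:
        """Return the first spec whose spec_id matches, or None if nothing matches."""
        for spec in specs:
            if getattr(spec, "spec_id", None) == spec_id:
                return spec
        return None

    # Usage against the registry in this file (import path assumed, not shown in this view):
    #   specs = available_distribution_specs()
    #   resolve_distribution_spec("local", specs)   # the spec formerly named "inline"
    #   resolve_distribution_spec("inline", specs)  # None after this commit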