More progress towards llama distribution install

Ashwin Bharambe 2024-08-01 16:40:43 -07:00
parent 5a583cf16e
commit dac2b5a1ed
11 changed files with 298 additions and 75 deletions

View file

@@ -4,12 +4,84 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
-from typing import List
+from enum import Enum
+from typing import Any, Dict, List, Literal, Union
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 from strong_typing.schema import json_schema_type
+from typing_extensions import Annotated
-class LlamaStackDistribution(BaseModel):
+
+@json_schema_type
+class AdapterType(Enum):
+    passthrough_api = "passthrough_api"
+    python_impl = "python_impl"
+    not_implemented = "not_implemented"
+
+@json_schema_type
+class PassthroughApiAdapterConfig(BaseModel):
+    type: Literal[AdapterType.passthrough_api.value] = AdapterType.passthrough_api.value
+    base_url: str = Field(..., description="The base URL for the llama stack provider")
+    headers: Dict[str, str] = Field(
+        default_factory=dict,
+        description="Headers (e.g., authorization) to send with the request",
+    )
+
+@json_schema_type
+class PythonImplAdapterConfig(BaseModel):
+    type: Literal[AdapterType.python_impl.value] = AdapterType.python_impl.value
+    pip_packages: List[str] = Field(
+        default_factory=list,
+        description="The pip dependencies needed for this implementation",
+    )
+    module: str = Field(..., description="The name of the module to import")
+    entrypoint: str = Field(
+        ...,
+        description="The name of the entrypoint function which creates the implementation for the API",
+    )
+    kwargs: Dict[str, Any] = Field(
+        default_factory=dict, description="kwargs to pass to the entrypoint"
+    )
+
+@json_schema_type
+class NotImplementedAdapterConfig(BaseModel):
+    type: Literal[AdapterType.not_implemented.value] = AdapterType.not_implemented.value
+
+# should we define very granular typed classes for each of the PythonImplAdapters we will have?
+# e.g., OllamaInference / vLLMInference / etc. might need very specific parameters
+AdapterConfig = Annotated[
+    Union[
+        PassthroughApiAdapterConfig,
+        NotImplementedAdapterConfig,
+        PythonImplAdapterConfig,
+    ],
+    Field(discriminator="type"),
+]
+
+class DistributionConfig(BaseModel):
+    inference: AdapterConfig
+    safety: AdapterConfig
+    # configs for each API that the stack provides, e.g.
+    # agentic_system: AdapterConfig
+    # post_training: AdapterConfig
+
+class DistributionConfigDefaults(BaseModel):
+    inference: Dict[str, Any] = Field(
+        default_factory=dict, description="Default kwargs for the inference adapter"
+    )
+    safety: Dict[str, Any] = Field(
+        default_factory=dict, description="Default kwargs for the safety adapter"
+    )
+
+class Distribution(BaseModel):
+    name: str
+    description: str
@@ -17,3 +89,5 @@ class LlamaStackDistribution(BaseModel):
     # later, we may have a docker image be the main artifact of
     # a distribution.
     pip_packages: List[str]
+
+    config_defaults: DistributionConfigDefaults
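
For orientation, here is a minimal sketch (not part of this commit) of how the new discriminated union is meant to resolve: pydantic reads the "type" field of a raw dict and validates it against the matching adapter config class. The module path and the field values below are assumptions for illustration only.

from llama_toolchain.distribution.datatypes import (  # assumed module path
    DistributionConfig,
    NotImplementedAdapterConfig,
    PassthroughApiAdapterConfig,
)

# The "type" discriminator selects the adapter config class during validation.
config = DistributionConfig(
    inference={
        "type": "passthrough_api",
        "base_url": "http://localhost:5000",  # illustrative URL
        "headers": {"Authorization": "Bearer <token>"},
    },
    safety={"type": "not_implemented"},
)

assert isinstance(config.inference, PassthroughApiAdapterConfig)
assert isinstance(config.safety, NotImplementedAdapterConfig)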

View file

@@ -0,0 +1,64 @@
+#!/bin/bash
+
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+set -euo pipefail
+
+error_handler() {
+    echo "Error occurred in script at line: ${1}" >&2
+    exit 1
+}
+
+# Set up the error trap
+trap 'error_handler ${LINENO}' ERR
+
+ensure_conda_env_python310() {
+    local env_name="$1"
+    local pip_dependencies="$2"
+    local python_version="3.10"
+
+    # Check if conda command is available
+    if ! command -v conda &>/dev/null; then
+        echo "Error: conda command not found. Is Conda installed and in your PATH?" >&2
+        exit 1
+    fi
+
+    # Check if the environment exists
+    if conda env list | grep -q "^${env_name} "; then
+        echo "Conda environment '${env_name}' exists. Checking Python version..."
+
+        # Check Python version in the environment
+        current_version=$(conda run -n "${env_name}" python --version 2>&1 | cut -d' ' -f2 | cut -d'.' -f1,2)
+
+        if [ "$current_version" = "$python_version" ]; then
+            echo "Environment '${env_name}' already has Python ${python_version}. No action needed."
+        else
+            echo "Updating environment '${env_name}' to Python ${python_version}..."
+            conda install -n "${env_name}" python="${python_version}" -y
+        fi
+    else
+        echo "Conda environment '${env_name}' does not exist. Creating with Python ${python_version}..."
+        conda create -n "${env_name}" python="${python_version}" -y
+    fi
+
+    # Install pip dependencies
+    if [ -n "$pip_dependencies" ]; then
+        echo "Installing pip dependencies: $pip_dependencies"
+        conda run -n "${env_name}" pip install $pip_dependencies
+    fi
+}
+
+if [ "$#" -ne 2 ]; then
+    echo "Usage: $0 <environment_name> <pip_dependencies>" >&2
+    echo "Example: $0 my_env 'numpy pandas scipy'" >&2
+    exit 1
+fi
+
+env_name="$1"
+pip_dependencies="$2"
+
+ensure_conda_env_python310 "$env_name" "$pip_dependencies"
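
Presumably this script is the piece a distribution install would shell out to in order to prepare a conda environment with a distribution's pip dependencies. A hedged sketch of that glue follows; the script filename, module path, and environment naming scheme are assumptions, not taken from this commit.

import subprocess

# Assumed module path; the registry function appears later in this commit.
from llama_toolchain.distribution.registry import all_registered_distributions

dist = next(d for d in all_registered_distributions() if d.name == "local-ollama")

# The script expects exactly two arguments: an env name and a space-separated
# pip dependency string (which may be empty).
subprocess.run(
    [
        "bash",
        "install_distribution.sh",       # hypothetical script name
        f"llamastack-{dist.name}",       # hypothetical env naming scheme
        " ".join(dist.pip_packages),
    ],
    check=True,
)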

View file

@@ -1,5 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.

View file

@@ -1,5 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the terms described in the LICENSE file in
-# the root directory of this source tree.

View file

@@ -6,19 +6,30 @@
 
 from typing import List
 
-from .datatypes import LlamaStackDistribution
+from .datatypes import Distribution, DistributionConfigDefaults
 
 
-def all_registered_distributions() -> List[LlamaStackDistribution]:
+def all_registered_distributions() -> List[Distribution]:
     return [
-        LlamaStackDistribution(
+        Distribution(
            name="local-source",
-            description="Use code within `llama_toolchain` itself to run model inference and everything on top",
+            description="Use code from `llama_toolchain` itself to serve all llama stack APIs",
            pip_packages=[],
+            config_defaults=DistributionConfigDefaults(
+                inference={
+                    "max_seq_len": 4096,
+                    "max_batch_size": 1,
+                },
+                safety={},
+            ),
        ),
-        LlamaStackDistribution(
+        Distribution(
            name="local-ollama",
            description="Like local-source, but use ollama for running LLM inference",
-            pip_packages=[],
+            pip_packages=["ollama"],
+            config_defaults=DistributionConfigDefaults(
+                inference={},
+                safety={},
+            ),
        ),
    ]
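
One plausible way (an assumption, not shown in this commit) for config_defaults to flow into the adapter configs defined earlier is as the kwargs of a PythonImplAdapterConfig when a distribution is served from local code:

from llama_toolchain.distribution.datatypes import (  # assumed module path
    Distribution,
    DistributionConfig,
    PythonImplAdapterConfig,
)


def default_local_config(dist: Distribution) -> DistributionConfig:
    """Hypothetical helper: seed a python_impl inference adapter from a distribution's defaults."""
    return DistributionConfig(
        inference=PythonImplAdapterConfig(
            pip_packages=dist.pip_packages,
            module="llama_toolchain.inference",        # placeholder module name
            entrypoint="get_inference_api_instance",   # placeholder entrypoint name
            kwargs=dist.config_defaults.inference,     # e.g. max_seq_len, max_batch_size
        ),
        safety={"type": "not_implemented"},
    )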