mirror of https://github.com/meta-llama/llama-stack.git
synced 2025-07-29 15:23:51 +00:00

commit 2076d2b6db
parent c4fe72c3a3

    api build works for conda now

5 changed files with 28 additions and 15 deletions
@@ -5,6 +5,7 @@
 # the root directory of this source tree.
 
 import argparse
+import json
 import os
 import random
 import string
@@ -137,6 +138,7 @@ class ApiBuild(Subcommand):
     def _run_api_build_command(self, args: argparse.Namespace) -> None:
         from llama_toolchain.common.exec import run_with_pty
         from llama_toolchain.distribution.distribution import api_providers
+        from llama_toolchain.common.serialize import EnumEncoder
 
         os.makedirs(BUILDS_BASE_DIR, exist_ok=True)
         all_providers = api_providers()
@@ -174,7 +176,7 @@ class ApiBuild(Subcommand):
         }
         with open(package_file, "w") as f:
             c = PackageConfig(
-                built_at=datetime.now(),
+                built_at=str(datetime.now()),
                 package_name=package_name,
                 docker_image=(
                     package_name if args.type == BuildType.container.value else None
@@ -184,7 +186,8 @@ class ApiBuild(Subcommand):
                 ),
                 providers=stub_config,
             )
-            f.write(yaml.dump(c.dict(), sort_keys=False))
+            to_write = json.loads(json.dumps(c.dict(), cls=EnumEncoder))
+            f.write(yaml.dump(to_write, sort_keys=False))
 
         if args.type == BuildType.container.value:
             script = pkg_resources.resource_filename(
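Note on the JSON round trip above: pydantic's .dict() leaves Enum and datetime values as live Python objects, which yaml.dump either refuses or serializes as noisy !!python/object tags. Dumping through json with a custom encoder and loading the result back leaves only plain strings, dicts, and lists for the YAML writer. A minimal self-contained sketch of the technique (the Color enum is a hypothetical stand-in for the real config values):

import json
from enum import Enum

import yaml


class EnumEncoder(json.JSONEncoder):
    # Replace enum members with their underlying value so json (and then
    # yaml) sees only plain serializable types.
    def default(self, obj):
        if isinstance(obj, Enum):
            return obj.value
        return super().default(obj)


class Color(Enum):  # hypothetical example value
    RED = "red"


data = {"color": Color.RED}
plain = json.loads(json.dumps(data, cls=EnumEncoder))  # {"color": "red"}
print(yaml.dump(plain, sort_keys=False))               # color: red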
@@ -209,10 +212,14 @@ class ApiBuild(Subcommand):
         ]
 
         return_code = run_with_pty(args)
-        assert return_code == 0, cprint(
-            f"Failed to build target {package_name}", color="red"
-        )
+        if return_code != 0:
+            cprint(
+                f"Failed to build target {package_name} with return code {return_code}",
+                color="red",
+            )
+            return
 
         cprint(
-            f"Target `{target_name}` built with configuration at {str(package_file)}",
+            f"Target `{package_name}` built with configuration at {str(package_file)}",
             color="green",
         )
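One detail in the hunk above: with `assert cond, message`, the message expression is evaluated only when the assertion fails, and a cprint(...) call returns None, so the raised AssertionError carried no message; worse, asserts are stripped entirely under `python -O`, so a failed build would pass silently. An explicit `if`/`return` avoids both pitfalls. A tiny stdlib-only sketch of the failure mode (plain print stands in for termcolor's cprint):

return_code = 1  # pretend the build step failed

try:
    # The message prints as a side effect, but its value is None, so the
    # exception itself carries no information -- and under `python -O`
    # this whole line is removed.
    assert return_code == 0, print("Failed to build target")
except AssertionError as e:
    print(f"AssertionError message: {e}")  # -> AssertionError message: None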
@@ -81,13 +81,12 @@ def configure_llama_provider(config_file: Path) -> None:
         provider_spec = providers[provider_id]
         cprint(f"Configuring API surface: {api}", "white", attrs=["bold"])
         config_type = instantiate_class_type(provider_spec.config_class)
-        print(f"Config type: {config_type}")
         provider_config = prompt_for_config(
             config_type,
         )
         print("")
 
-        provider_configs[api.value] = {
+        provider_configs[api] = {
             "provider_id": provider_id,
             **provider_config.dict(),
         }
@@ -97,4 +96,4 @@ def configure_llama_provider(config_file: Path) -> None:
         to_write = json.loads(json.dumps(config.dict(), cls=EnumEncoder))
         fp.write(yaml.dump(to_write, sort_keys=False))
 
-    print(f"YAML configuration has been written to {config_path}")
+    print(f"YAML configuration has been written to {config_file}")
@@ -71,7 +71,6 @@ def prompt_for_config(
     """
     config_data = {}
 
-    print(f"Configuring {config_type.__name__}:")
     for field_name, field in config_type.__fields__.items():
         field_type = field.annotation
 
@@ -86,7 +85,6 @@ def prompt_for_config(
             if not isinstance(field.default, PydanticUndefinedType)
             else None
         )
-        print(f" {field_name}: {field_type} (default: {default_value})")
         is_required = field.is_required
 
         # Skip fields with Literal type
@@ -5,6 +5,7 @@
 # the root directory of this source tree.
 
 import json
+from datetime import datetime
 from enum import Enum
 
 
@@ -12,4 +13,6 @@ class EnumEncoder(json.JSONEncoder):
     def default(self, obj):
         if isinstance(obj, Enum):
             return obj.value
+        elif isinstance(obj, datetime):
+            return obj.isoformat()
         return super().default(obj)
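With the datetime branch added, EnumEncoder flattens both enum members and timestamps in a single json.dumps pass, which is what the api build command relies on before handing the dict to yaml.dump. A quick usage sketch (the BuildType stand-in and its value are assumed for illustration):

import json
from datetime import datetime
from enum import Enum

from llama_toolchain.common.serialize import EnumEncoder  # the class patched above


class BuildType(Enum):  # hypothetical stand-in for the real toolchain enum
    container = "container"


payload = {"type": BuildType.container, "built_at": datetime(2024, 8, 1, 12, 0)}
print(json.dumps(payload, cls=EnumEncoder))
# {"type": "container", "built_at": "2024-08-01T12:00:00"}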
@@ -10,7 +10,13 @@ LLAMA_MODELS_DIR=${LLAMA_MODELS_DIR:-}
 LLAMA_TOOLCHAIN_DIR=${LLAMA_TOOLCHAIN_DIR:-}
 TEST_PYPI_VERSION=${TEST_PYPI_VERSION:-}
 
-echo "llama-toolchain-dir=$LLAMA_TOOLCHAIN_DIR"
+if [ -n "$LLAMA_TOOLCHAIN_DIR" ]; then
+  echo "Using llama-toolchain-dir=$LLAMA_TOOLCHAIN_DIR"
+fi
+
+if [ -n "$LLAMA_MODELS_DIR" ]; then
+  echo "Using llama-models-dir=$LLAMA_MODELS_DIR"
+fi
 
 set -euo pipefail
 
 if [ "$#" -ne 3 ]; then
@@ -82,9 +88,9 @@ ensure_conda_env_python310() {
     fi
 
     echo "Installing from LLAMA_TOOLCHAIN_DIR: $LLAMA_TOOLCHAIN_DIR"
-    pip install -e "$LLAMA_TOOLCHAIN_DIR"
+    pip install --no-cache-dir -e "$LLAMA_TOOLCHAIN_DIR"
   else
-    pip install llama-toolchain
+    pip install --no-cache-dir llama-toolchain
   fi
 
   if [ -n "$LLAMA_MODELS_DIR" ]; then
@@ -95,7 +101,7 @@ ensure_conda_env_python310() {
 
     echo "Installing from LLAMA_MODELS_DIR: $LLAMA_MODELS_DIR"
     pip uninstall -y llama-models
-    pip install -e "$LLAMA_MODELS_DIR"
+    pip install --no-cache-dir -e "$LLAMA_MODELS_DIR"
   fi
 
   # Install pip dependencies