Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-17 13:12:36 +00:00
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from llama_stack.distributions.template import BuildProvider, DistributionTemplate

from ..starter.starter import get_distribution_template as get_starter_distribution_template


def get_distribution_template() -> DistributionTemplate:
    template = get_starter_distribution_template()
    name = "starter-gpu"
    template.name = name
    template.description = "Quick start template for running Llama Stack with several popular providers. This distribution is intended for GPU-enabled environments."

    template.providers["post_training"] = [
        BuildProvider(provider_type="inline::torchtune-gpu"),
    ]
    return template
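

# ---------------------------------------------------------------------------
# Usage sketch (illustrative only, not part of the upstream module): the
# template returned here is the regular "starter" distribution with its name,
# description, and post_training provider overridden for GPU environments.
# The import path below is an assumption based on the relative
# ..starter.starter import above, i.e. that this file lives at
# llama_stack/distributions/starter_gpu/starter_gpu.py.
#
#   from llama_stack.distributions.starter_gpu.starter_gpu import (
#       get_distribution_template,
#   )
#
#   template = get_distribution_template()
#   assert template.name == "starter-gpu"
#   assert (
#       template.providers["post_training"][0].provider_type
#       == "inline::torchtune-gpu"
#   )
# ---------------------------------------------------------------------------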