Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-04 20:14:13 +00:00)

More progress towards llama distribution install
This commit is contained in:
parent 5a583cf16e
commit dac2b5a1ed

11 changed files with 298 additions and 75 deletions
@@ -6,19 +6,30 @@
 from typing import List
 
-from .datatypes import LlamaStackDistribution
+from .datatypes import Distribution, DistributionConfigDefaults
 
 
-def all_registered_distributions() -> List[LlamaStackDistribution]:
+def all_registered_distributions() -> List[Distribution]:
     return [
-        LlamaStackDistribution(
+        Distribution(
             name="local-source",
-            description="Use code within `llama_toolchain` itself to run model inference and everything on top",
+            description="Use code from `llama_toolchain` itself to serve all llama stack APIs",
             pip_packages=[],
+            config_defaults=DistributionConfigDefaults(
+                inference={
+                    "max_seq_len": 4096,
+                    "max_batch_size": 1,
+                },
+                safety={},
+            ),
         ),
-        LlamaStackDistribution(
+        Distribution(
             name="local-ollama",
             description="Like local-source, but use ollama for running LLM inference",
-            pip_packages=[],
+            pip_packages=["ollama"],
+            config_defaults=DistributionConfigDefaults(
+                inference={},
+                safety={},
+            ),
         ),
     ]
 
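For context, a minimal sketch of what the renamed datatypes might look like, inferred only from how they are constructed in this hunk. The real definitions live in `.datatypes`, which this commit does not show, so the use of dataclasses and the exact field types here are assumptions:

    from dataclasses import dataclass, field
    from typing import Any, Dict, List


    @dataclass
    class DistributionConfigDefaults:
        # Per-API default config values, matching the inference/safety
        # keyword arguments passed in the hunk above. (Assumed shape:
        # the diff only shows plain dicts being passed in.)
        inference: Dict[str, Any] = field(default_factory=dict)
        safety: Dict[str, Any] = field(default_factory=dict)


    @dataclass
    class Distribution:
        # Fields inferred from the constructor calls in the diff.
        name: str
        description: str
        pip_packages: List[str]
        config_defaults: DistributionConfigDefaults

With types like these, the registry above could be consumed as, for example:

    for dist in all_registered_distributions():
        print(dist.name, dist.pip_packages, dist.config_defaults.inference)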