Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-06 20:44:58 +00:00)
models api from models config
This commit is contained in: parent 54cd9ded80, commit 47be4c7222
2 changed files with 20 additions and 30 deletions
@@ -18,16 +18,6 @@ from termcolor import cprint
 from .config import BuiltinImplConfig
 
-
-DUMMY_MODELS_SPEC_1 = ModelSpec(
-    llama_model_metadata=resolve_model("Llama-Guard-3-8B"),
-    providers_spec={"safety": {"provider_type": "meta-reference"}},
-)
-
-DUMMY_MODELS_SPEC_2 = ModelSpec(
-    llama_model_metadata=resolve_model("Meta-Llama3.1-8B-Instruct"),
-    providers_spec={"inference": {"provider_type": "meta-reference"}},
-)
-
 
 
 class BuiltinModelsImpl(Models):
     def __init__(
@@ -35,19 +25,21 @@ class BuiltinModelsImpl(Models):
         config: BuiltinImplConfig,
     ) -> None:
         self.config = config
 
-        self.models = {
-            x.llama_model_metadata.core_model_id.value: x
-            for x in [DUMMY_MODELS_SPEC_1, DUMMY_MODELS_SPEC_2]
-        }
+        cprint(self.config, "red")
+        self.models = {
+            entry.core_model_id: ModelSpec(
+                llama_model_metadata=resolve_model(entry.core_model_id),
+                provider_id=entry.provider_id,
+                api=entry.api,
+                provider_config=entry.config,
+            )
+            for entry in self.config.models_config
+        }
 
     async def initialize(self) -> None:
         pass
 
     async def list_models(self) -> ModelsListResponse:
+        print(self.config, "hihihi")
         return ModelsListResponse(models_list=list(self.models.values()))
 
     async def get_model(self, core_model_id: str) -> ModelsGetResponse:
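For context, here is a minimal, self-contained sketch of the pattern the new `__init__` follows: the model registry is built from entries in the config rather than from hard-coded dummy specs. The `ModelConfigEntry` and `BuiltinImplConfigSketch` classes below are hypothetical stand-ins (the real `BuiltinImplConfig` lives in `.config` and is not part of this diff); only the field names `core_model_id`, `provider_id`, `api`, and `config`, and the dict comprehension over `models_config`, are taken from the change itself.

```python
# Sketch only: hypothetical stand-ins for the config types referenced in the diff.
from dataclasses import dataclass, field
from typing import Any, Dict, List


@dataclass
class ModelConfigEntry:          # stand-in for one entry of models_config
    core_model_id: str           # e.g. "Llama-Guard-3-8B"
    provider_id: str             # e.g. "meta-reference"
    api: str                     # e.g. "safety" or "inference"
    config: Dict[str, Any] = field(default_factory=dict)


@dataclass
class BuiltinImplConfigSketch:   # stand-in for BuiltinImplConfig
    models_config: List[ModelConfigEntry] = field(default_factory=list)


# Mirrors the comprehension in the new __init__: one registry entry per
# configured model, keyed by its core_model_id.
cfg = BuiltinImplConfigSketch(
    models_config=[
        ModelConfigEntry("Llama-Guard-3-8B", "meta-reference", "safety"),
        ModelConfigEntry("Meta-Llama3.1-8B-Instruct", "meta-reference", "inference"),
    ]
)
models = {entry.core_model_id: entry for entry in cfg.models_config}
print(sorted(models))  # ['Llama-Guard-3-8B', 'Meta-Llama3.1-8B-Instruct']
```

Keying the registry by `core_model_id` makes `list_models` a direct `models.values()` call, as in the diff above, and presumably lets `get_model` reduce to a dictionary lookup.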