Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 15:23:51 +00:00)
Commit ce17f20468 ("wip models"), parent 24dbe448a3
2 changed files with 2 additions and 5 deletions
@@ -59,8 +59,3 @@ class Models(Protocol):
    @webmethod(route="/models/get", method="POST")
    async def get_model(self, model_id: str) -> ModelsGetResponse: ...

    @webmethod(route="/models/register")
    async def register_model(
        self, model_id: str, api: str, provider_spec: Dict[str, str]
    ) -> ModelsRegisterResponse: ...

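For context, below is a minimal, self-contained sketch of how the protocol methods in this hunk read when placed in a runnable file. Only the two method declarations come from the diff; the webmethod decorator and the ModelsGetResponse / ModelsRegisterResponse types are defined elsewhere in llama-stack, so the stand-in definitions here are assumptions for illustration only.

# Sketch only: webmethod and the response types are stand-ins, not the real
# llama-stack definitions; in the repo they are imported from shared modules.
from dataclasses import dataclass, field
from typing import Dict, Protocol


def webmethod(route: str, method: str = "GET"):
    # Stand-in (assumed shape): tag the function with its HTTP route/method.
    def wrapper(fn):
        fn.__webmethod__ = {"route": route, "method": method}
        return fn
    return wrapper


@dataclass
class ModelsGetResponse:
    core_model_spec: Dict[str, str] = field(default_factory=dict)  # assumed shape


@dataclass
class ModelsRegisterResponse:
    pass  # assumed shape


class Models(Protocol):
    # These two declarations mirror the hunk above.
    @webmethod(route="/models/get", method="POST")
    async def get_model(self, model_id: str) -> ModelsGetResponse: ...

    @webmethod(route="/models/register")
    async def register_model(
        self, model_id: str, api: str, provider_spec: Dict[str, str]
    ) -> ModelsRegisterResponse: ...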
@@ -39,6 +39,8 @@ class MetaReferenceModelsImpl(Models):
        self.safety_api = safety_api

        self.models_list = []
        model = get_model_id_from_api(self.inference_api)

        # TODO, make the inference route provider and use router provider to do the lookup dynamically
        if isinstance(
            self.inference_api,
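The hunk above sits inside the implementation's constructor, where the model id is looked up from the inference provider at init time. A hedged sketch of that flow is below; the diff cuts off mid-isinstance check, so the helper's behavior, the class being matched, and the shape of the models_list entries are all assumptions, not the repo's actual code.

# Sketch under assumptions: stand-ins for the real provider classes and helper.
from typing import Any, List, Optional


class MetaReferenceInferenceImpl:
    # Hypothetical stand-in for the concrete inference provider class.
    def __init__(self, model: str) -> None:
        self.model = model


def get_model_id_from_api(inference_api: Any) -> Optional[str]:
    # Assumed behavior: read the configured model id off the inference provider.
    return getattr(inference_api, "model", None)


class MetaReferenceModelsImpl:
    def __init__(self, inference_api: Any, safety_api: Any) -> None:
        self.inference_api = inference_api
        self.safety_api = safety_api

        self.models_list: List[dict] = []
        model = get_model_id_from_api(self.inference_api)

        # TODO, make the inference route provider and use router provider
        # to do the lookup dynamically
        if isinstance(self.inference_api, MetaReferenceInferenceImpl):
            self.models_list.append({"model_id": model, "api": "inference"})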