mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 12:07:34 +00:00
Improve TGI adapter initialization condition
This commit is contained in:
parent
2ac8e7b901
commit
0964b0a74a
2 changed files with 2 additions and 4 deletions
|
@ -7,6 +7,7 @@
|
|||
from typing import Optional
|
||||
|
||||
from huggingface_hub import HfApi
|
||||
|
||||
from llama_models.schema_utils import json_schema_type
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
@ -31,6 +32,3 @@ class TGIImplConfig(BaseModel):
|
|||
|
||||
def get_namespace(self) -> str:
    """Return the Hugging Face Hub namespace (account name) of the current user.

    Queries the Hub via ``HfApi.whoami()``, so valid Hugging Face
    credentials must already be configured in the environment
    (e.g. an ``HF_TOKEN`` / cached login).
    """
    hub_client = HfApi()
    account_info = hub_client.whoami()
    return account_info["name"]
|
||||
|
||||
def is_local_tgi(self) -> bool:
    """Return True if the configured TGI endpoint points at this machine.

    The previous prefix check (``url.startswith("http://localhost")``)
    had two defects: it also matched non-local hosts such as
    ``http://localhost.example.com``, and it failed to recognize the
    loopback addresses ``127.0.0.1`` and ``::1``. Parsing the URL and
    comparing the hostname fixes both.

    Returns:
        bool: True when ``self.url`` is set and its hostname is a
        loopback host; False when ``self.url`` is None or remote.
    """
    # Local import: keeps the file-level import block untouched.
    from urllib.parse import urlparse

    if self.url is None:
        return False
    # hostname strips scheme, port, and any IPv6 brackets for us.
    return urlparse(self.url).hostname in ("localhost", "127.0.0.1", "::1")
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue