Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-07-29 07:14:20 +00:00)
Improve TGI adapter initialization condition
parent 2ac8e7b901
commit 0964b0a74a

2 changed files with 2 additions and 4 deletions
@@ -11,7 +11,7 @@ from .tgi import InferenceEndpointAdapter, TGIAdapter
 async def get_adapter_impl(config: TGIImplConfig, _deps):
     assert isinstance(config, TGIImplConfig), f"Unexpected config type: {type(config)}"

-    if config.is_local_tgi():
+    if config.url is not None:
         impl = TGIAdapter(config)
     elif config.is_inference_endpoint():
         impl = InferenceEndpointAdapter(config)
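Below is a minimal, self-contained sketch of how the updated condition selects an adapter. ToyTGIConfig, its hf_endpoint_name field, and the example values are illustrative stand-ins rather than part of this commit; only url and is_inference_endpoint() appear in the diff above.

# Illustrative sketch only: a toy stand-in for TGIImplConfig with just the two
# pieces the dispatch condition inspects. hf_endpoint_name is an assumed field
# backing is_inference_endpoint(); the real config class lives in llama-stack.
from dataclasses import dataclass
from typing import Optional


@dataclass
class ToyTGIConfig:
    url: Optional[str] = None
    hf_endpoint_name: Optional[str] = None

    def is_inference_endpoint(self) -> bool:
        return self.hf_endpoint_name is not None


def pick_adapter(config: ToyTGIConfig) -> str:
    # Mirrors the new condition: any configured URL selects the TGI adapter,
    # not only a localhost one.
    if config.url is not None:
        return "TGIAdapter"
    elif config.is_inference_endpoint():
        return "InferenceEndpointAdapter"
    raise ValueError("Set either url or hf_endpoint_name")


print(pick_adapter(ToyTGIConfig(url="http://my-tgi-host:8080")))          # TGIAdapter
print(pick_adapter(ToyTGIConfig(hf_endpoint_name="my-org/my-endpoint")))  # InferenceEndpointAdapter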
@@ -7,6 +7,7 @@
 from typing import Optional

 from huggingface_hub import HfApi

 from llama_models.schema_utils import json_schema_type
 from pydantic import BaseModel, Field
@@ -31,6 +32,3 @@ class TGIImplConfig(BaseModel):

     def get_namespace(self) -> str:
         return HfApi().whoami()["name"]
-
-    def is_local_tgi(self) -> bool:
-        return self.url is not None and self.url.startswith("http://localhost")
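The practical effect of the condition change, sketched below with made-up URLs: the deleted is_local_tgi() only accepted URLs starting with http://localhost, so a TGI server reachable at any other address never matched the first branch, while the new config.url is not None check accepts any configured URL.

# Sketch (not from the commit): the removed localhost-only predicate versus the
# new "any URL" check now used in get_adapter_impl. URLs below are examples.
from typing import Optional


def old_is_local_tgi(url: Optional[str]) -> bool:
    # Behaviour of the deleted TGIImplConfig.is_local_tgi()
    return url is not None and url.startswith("http://localhost")


def new_condition(url: Optional[str]) -> bool:
    # Behaviour of the new `config.url is not None` check
    return url is not None


for url in (None, "http://localhost:8080", "http://tgi.example.internal:8080"):
    print(url, old_is_local_tgi(url), new_condition(url))
# None                               False  False
# http://localhost:8080              True   True
# http://tgi.example.internal:8080   False  True   <- remote TGI now selects TGIAdapter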