Mirror of https://github.com/meta-llama/llama-stack.git
fix tgi

commit 5c6e1e9d1e
parent e88faa91e2

3 changed files with 10 additions and 5 deletions
@@ -15,10 +15,10 @@ class TGIImplConfig(BaseModel):
    url: str = Field(
        description="The URL for the TGI serving endpoint",
    )
    api_token: Optional[SecretStr] = Field(
        default=None,
        description="A bearer token if your TGI endpoint is protected.",
    )
    # api_token: Optional[SecretStr] = Field(
    #     default=None,
    #     description="A bearer token if your TGI endpoint is protected.",
    # )

    @classmethod
    def sample_run_config(cls, url: str = "${env.TGI_URL}", **kwargs):
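For context, the following is a minimal, self-contained sketch of how a Pydantic config of this shape can be defined and exercised. The class and field names mirror the hunk above; the body of sample_run_config and the usage at the bottom are assumptions for illustration only, not the repository's actual implementation.

    # Sketch of a TGI-style provider config, assuming Pydantic v2.
    from typing import Optional

    from pydantic import BaseModel, Field, SecretStr


    class TGIImplConfig(BaseModel):
        url: str = Field(
            description="The URL for the TGI serving endpoint",
        )
        api_token: Optional[SecretStr] = Field(
            default=None,
            description="A bearer token if your TGI endpoint is protected.",
        )

        @classmethod
        def sample_run_config(cls, url: str = "${env.TGI_URL}", **kwargs):
            # Assumed behavior: return a template dict whose values are
            # substituted from environment variables when a run config
            # is rendered.
            return {"url": url}


    if __name__ == "__main__":
        # Instantiate with a concrete endpoint and an optional bearer token;
        # SecretStr keeps the token out of reprs and logs.
        config = TGIImplConfig(url="http://localhost:8080", api_token="hf_xxx")
        print(TGIImplConfig.sample_run_config())    # {'url': '${env.TGI_URL}'}
        print(config.api_token.get_secret_value())  # 'hf_xxx'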