Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-16 23:03:49 +00:00)
clean up build
parent 5161a8ef8c
commit 676b07e91e
3 changed files with 10 additions and 20 deletions
@@ -12,10 +12,13 @@ from pydantic import BaseModel, Field

@json_schema_type
class TGIImplConfig(BaseModel):
    url: str = Field(
        description="The URL for the TGI endpoint (e.g. 'http://localhost:8080')",
        default="http://localhost:8080",
    )
    host: str = "localhost"
    port: int = 8080

    @property
    def url(self) -> str:
        return f"http://{self.host}:{self.port}"

    api_token: Optional[str] = Field(
        default=None,
        description="A bearer token if your TGI endpoint is protected.",
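For context, the hunk above touches TGIImplConfig, the pydantic model that tells the TGI inference adapter where its serving endpoint lives. Below is a minimal, self-contained sketch of the host/port form shown in the hunk; it omits the @json_schema_type decorator from llama_stack and keeps only what pydantic itself needs, so the defaults and the derived url property can be checked in isolation. It is a sketch of the code as it appears in this diff, not necessarily the current state of the repository.

    from typing import Optional

    from pydantic import BaseModel, Field


    class TGIImplConfig(BaseModel):
        # Sketch of the host/port variant from the hunk; the
        # @json_schema_type decorator is omitted here.
        host: str = "localhost"
        port: int = 8080
        api_token: Optional[str] = Field(
            default=None,
            description="A bearer token if your TGI endpoint is protected.",
        )

        @property
        def url(self) -> str:
            # The endpoint URL is derived from host and port rather than stored.
            return f"http://{self.host}:{self.port}"


    if __name__ == "__main__":
        cfg = TGIImplConfig()
        print(cfg.url)  # -> http://localhost:8080

The other side of the hunk replaces the derived property with a plain url field carrying the same default ("http://localhost:8080"), which lets callers point the adapter at any endpoint string directly.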