Kill llama stack configure (#371)

* remove configure

* build msg

* wip

* build->run

* delete prints

* docs

* fix docs, kill configure

* precommit

* update fireworks build

* docs

* clean up build

* comments

* fix

* test

* remove baking build.yaml into docker

* fix msg, urls

* configure msg
This commit is contained in:
Xi Yan 2024-11-06 13:32:10 -08:00 committed by GitHub
parent d289afdbde
commit 748606195b
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 248 additions and 401 deletions

View file

@@ -12,9 +12,14 @@ from pydantic import BaseModel, Field
@json_schema_type
class TGIImplConfig(BaseModel):
url: str = Field(
description="The URL for the TGI endpoint (e.g. 'http://localhost:8080')",
)
host: str = "localhost"
port: int = 8080
protocol: str = "http"
@property
def url(self) -> str:
return f"{self.protocol}://{self.host}:{self.port}"
api_token: Optional[str] = Field(
default=None,
description="A bearer token if your TGI endpoint is protected.",