Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-09 11:20:58 +00:00
Convert TGI to work with openai_compat

parent 05e73d12b3
commit ed899a5dec

6 changed files with 133 additions and 338 deletions
@@ -48,10 +48,6 @@ class TogetherInferenceAdapter(
        self.config = config
        self.formatter = ChatFormat(Tokenizer.get_instance())

    @property
    def client(self) -> Together:
        return Together(api_key=self.config.api_key)

    async def initialize(self) -> None:
        return
@@ -91,7 +87,6 @@ class TogetherInferenceAdapter(
        together_api_key = provider_data.together_api_key

        client = Together(api_key=together_api_key)
        # wrapper request to make it easier to pass around (internal only, not exposed to API)
        request = ChatCompletionRequest(
            model=model,
            messages=messages,
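
The second hunk shows the shape of the refactor: instead of relying on a client cached on the adapter (the `client` property removed in the first hunk), the API key is read from per-request provider data and the incoming arguments are bundled into an internal `ChatCompletionRequest` wrapper before being handed to the provider client. Below is a minimal, self-contained sketch of that flow; `ProviderData` and `convert_to_provider_params` are hypothetical stand-ins for illustration, not the actual llama-stack types or helpers.

# Minimal sketch of the per-request client pattern visible in the diff.
# ProviderData and convert_to_provider_params are hypothetical names used
# only for illustration; the real adapter uses llama-stack's own types.
from dataclasses import dataclass
from typing import List, Optional

from together import Together  # same SDK the adapter instantiates


@dataclass
class ProviderData:
    together_api_key: Optional[str] = None


@dataclass
class ChatCompletionRequest:
    # Internal wrapper, not exposed to the API (mirrors the diff's comment).
    model: str
    messages: List[dict]


def convert_to_provider_params(request: ChatCompletionRequest) -> dict:
    # Flatten the internal request into the provider's chat-completions kwargs.
    return {"model": request.model, "messages": request.messages}


def chat_completion(provider_data: ProviderData, model: str, messages: List[dict]):
    # Build the client per request from provider data rather than adapter config.
    client = Together(api_key=provider_data.together_api_key)
    request = ChatCompletionRequest(model=model, messages=messages)
    return client.chat.completions.create(**convert_to_provider_params(request))

Constructing the client per request lets the adapter pick up per-user credentials from provider data instead of a single key baked into the adapter config, which is presumably why the cached `client` property is dropped in the first hunk.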