Mirror of https://github.com/meta-llama/llama-stack.git (synced 2026-01-07 09:49:05 +00:00)
fix params

commit 3a6febdc0f (parent da373471e4)
1 changed file with 0 additions and 2 deletions
@@ -177,10 +177,8 @@ class TogetherInferenceAdapter(ModelRegistryHelper, Inference, NeedsRequestProvi
             )
         )
         if stream:
-            print("STREAMING", request)
             return self._stream_chat_completion(request)
         else:
-            print("NON STREAMING", request)
             return await self._nonstream_chat_completion(request)

     async def _nonstream_chat_completion(self, request: ChatCompletionRequest) -> ChatCompletionResponse:
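The dispatch pattern visible in this hunk is worth noting: the streaming branch returns self._stream_chat_completion(request) without awaiting it (an async generator the caller iterates), while the non-streaming branch awaits an ordinary coroutine. Below is a minimal, self-contained sketch of that pattern only; the Request, Response, and Adapter names are hypothetical stand-ins, not the adapter's real ChatCompletionRequest/ChatCompletionResponse classes.

import asyncio
from typing import AsyncGenerator, Union

# Hypothetical stand-ins for the request/response types used in the diff.
class Request:
    def __init__(self, prompt: str, stream: bool) -> None:
        self.prompt = prompt
        self.stream = stream

class Response:
    def __init__(self, text: str) -> None:
        self.text = text

class Adapter:
    async def chat_completion(self, request: Request) -> Union[Response, AsyncGenerator[str, None]]:
        if request.stream:
            # Async generator: return it un-awaited so the caller can iterate chunks.
            return self._stream_chat_completion(request)
        else:
            # Plain coroutine: await it and return the finished response.
            return await self._nonstream_chat_completion(request)

    async def _stream_chat_completion(self, request: Request) -> AsyncGenerator[str, None]:
        # Yield the prompt word by word to stand in for streamed chunks.
        for chunk in request.prompt.split():
            yield chunk

    async def _nonstream_chat_completion(self, request: Request) -> Response:
        return Response(text=request.prompt.upper())

async def main() -> None:
    adapter = Adapter()
    # Streaming: the awaited call hands back an async generator to iterate.
    async for chunk in await adapter.chat_completion(Request("hello streaming world", stream=True)):
        print(chunk)
    # Non-streaming: the awaited call hands back a complete Response.
    print((await adapter.chat_completion(Request("hello", stream=False))).text)

asyncio.run(main())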