Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-07 11:08:20 +00:00)
commit 69c8a4fe40 (parent af20652677)

    pre-commit rerun

1 changed file with 7 additions and 7 deletions
@@ -162,13 +162,13 @@ class PassthroughInferenceAdapter(Inference):
        response = await client.inference.chat_completion(**json_params)

        return ChatCompletionResponse(
            completion_message=CompletionMessage(
                content=response.completion_message.content.text,
                stop_reason=response.completion_message.stop_reason,
                tool_calls=response.completion_message.tool_calls,
            ),
            logprobs=response.logprobs,
        )

    async def _stream_chat_completion(self, json_params: Dict[str, Any]) -> AsyncGenerator:
        client = self._get_client()
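For context, the hunk above is the non-streaming path of the passthrough adapter: it forwards the prepared request parameters to the downstream llama-stack client and re-wraps the remote result in the local ChatCompletionResponse / CompletionMessage types. Below is a minimal, self-contained sketch of that flow in Python; the fake client, its chat_completion stub, and every field name beyond those visible in the diff are assumptions made purely for illustration, not the repository's actual implementation.

# Hedged sketch, not the repository code: exercises the wrapping logic shown
# in the hunk above against a stand-in for the downstream client.
import asyncio
from dataclasses import dataclass, field
from types import SimpleNamespace
from typing import Any, Dict, List, Optional


@dataclass
class CompletionMessage:
    content: str
    stop_reason: str
    tool_calls: List[Any] = field(default_factory=list)


@dataclass
class ChatCompletionResponse:
    completion_message: CompletionMessage
    logprobs: Optional[List[Any]] = None


class FakePassthroughClient:
    """Stands in for the downstream llama-stack client in this sketch."""

    def __init__(self) -> None:
        self.inference = SimpleNamespace(chat_completion=self._chat_completion)

    async def _chat_completion(self, **json_params: Any) -> SimpleNamespace:
        # Return a canned response shaped like the one the diff unwraps.
        return SimpleNamespace(
            completion_message=SimpleNamespace(
                content=SimpleNamespace(text="hello from the passthrough target"),
                stop_reason="end_of_turn",
                tool_calls=[],
            ),
            logprobs=None,
        )


async def nonstream_chat_completion(client: Any, json_params: Dict[str, Any]) -> ChatCompletionResponse:
    # Mirrors the shape of the hunk: forward the prepared params downstream,
    # then re-wrap the remote result in the local response types.
    response = await client.inference.chat_completion(**json_params)
    return ChatCompletionResponse(
        completion_message=CompletionMessage(
            content=response.completion_message.content.text,
            stop_reason=response.completion_message.stop_reason,
            tool_calls=response.completion_message.tool_calls,
        ),
        logprobs=response.logprobs,
    )


if __name__ == "__main__":
    result = asyncio.run(
        nonstream_chat_completion(
            FakePassthroughClient(),
            {"model_id": "llama-3", "messages": [{"role": "user", "content": "hi"}]},
        )
    )
    print(result.completion_message.content)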