From 44069404fd40159f39c17f09159bb46f2694ea5e Mon Sep 17 00:00:00 2001
From: Xi Yan
Date: Wed, 15 Jan 2025 16:50:58 -0800
Subject: [PATCH] stream completion log probs

---
 llama_stack/providers/utils/inference/openai_compat.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/llama_stack/providers/utils/inference/openai_compat.py b/llama_stack/providers/utils/inference/openai_compat.py
index 88aca1a1c..0cad78df9 100644
--- a/llama_stack/providers/utils/inference/openai_compat.py
+++ b/llama_stack/providers/utils/inference/openai_compat.py
@@ -178,6 +178,7 @@ async def process_completion_stream_response(
         yield CompletionResponseStreamChunk(
             delta=text,
             stop_reason=stop_reason,
+            logprobs=convert_openai_completion_logprobs(choice.logprobs),
         )
         if finish_reason:
             if finish_reason in ["stop", "eos", "eos_token"]:
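
The one-line change above forwards per-token log probabilities on each streamed completion chunk instead of dropping them, reusing the same converter the non-streaming path relies on. The sketch below is only an illustration of the kind of conversion that line depends on; it is not the library's actual implementation. "TokenLogProbs" is a hypothetical stand-in type, and the assumed input shape (an OpenAI-style `logprobs` object with a `top_logprobs` list of per-token dicts) is an assumption based on the OpenAI completions API.

    # Hedged sketch, assuming an OpenAI-style completion `logprobs` object.
    from dataclasses import dataclass
    from typing import Dict, List, Optional


    @dataclass
    class TokenLogProbs:
        # Hypothetical stand-in: maps each candidate token to its log
        # probability for one generated position.
        logprobs_by_token: Dict[str, float]


    def convert_openai_completion_logprobs_sketch(logprobs) -> Optional[List[TokenLogProbs]]:
        # OpenAI-style completion choices may carry `logprobs.top_logprobs`:
        # one dict per generated token, mapping candidate tokens to log
        # probabilities. If the provider returned nothing (e.g. logprobs were
        # not requested), return None so the stream chunk simply omits them.
        if not logprobs or not getattr(logprobs, "top_logprobs", None):
            return None
        return [TokenLogProbs(logprobs_by_token=entry) for entry in logprobs.top_logprobs]

Under that assumption, each `CompletionResponseStreamChunk` yielded by `process_completion_stream_response` carries whatever log probabilities the provider reported for the tokens in that delta, or `None` when none were returned.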