diff --git a/llama_stack/providers/remote/inference/lmstudio/_client.py b/llama_stack/providers/remote/inference/lmstudio/_client.py
index 84bbd946f..fc7d626b1 100644
--- a/llama_stack/providers/remote/inference/lmstudio/_client.py
+++ b/llama_stack/providers/remote/inference/lmstudio/_client.py
@@ -116,12 +116,12 @@ class LMStudioClient:
                             delta=TextDelta(text=chunk.content),
                         )
                     )
-                    yield ChatCompletionResponseStreamChunk(
-                        event=ChatCompletionResponseEvent(
-                            event_type=ChatCompletionResponseEventType.complete,
-                            delta=TextDelta(text=""),
-                        )
+                yield ChatCompletionResponseStreamChunk(
+                    event=ChatCompletionResponseEvent(
+                        event_type=ChatCompletionResponseEventType.complete,
+                        delta=TextDelta(text=""),
                     )
+                )

             return stream_generator()
         else:
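
For context: the change dedents the final `complete` chunk so it is yielded once after the streaming loop rather than on every iteration inside it. Below is a minimal, self-contained sketch of that control flow. The dataclasses are simplified stand-ins for the real llama_stack types (`ChatCompletionResponseStreamChunk`, `ChatCompletionResponseEvent`, `TextDelta`), and `fake_token_stream()` plus the `progress` event type for per-token chunks are illustrative assumptions, not the actual LM Studio client code.

```python
# Sketch of the generator shape after the fix: N "progress" chunks, then one "complete".
import asyncio
from dataclasses import dataclass
from enum import Enum


class ChatCompletionResponseEventType(Enum):
    progress = "progress"
    complete = "complete"


@dataclass
class TextDelta:
    text: str


@dataclass
class ChatCompletionResponseEvent:
    event_type: ChatCompletionResponseEventType
    delta: TextDelta


@dataclass
class ChatCompletionResponseStreamChunk:
    event: ChatCompletionResponseEvent


async def fake_token_stream():
    # Hypothetical stand-in for the LM Studio prediction stream.
    for token in ["Hello", ", ", "world"]:
        yield token


async def stream_generator():
    # One "progress" chunk per streamed token.
    async for token in fake_token_stream():
        yield ChatCompletionResponseStreamChunk(
            event=ChatCompletionResponseEvent(
                event_type=ChatCompletionResponseEventType.progress,
                delta=TextDelta(text=token),
            )
        )
    # A single "complete" chunk after the loop. Before the fix, this yield sat
    # inside the loop body, so a "complete" event was emitted after every token.
    yield ChatCompletionResponseStreamChunk(
        event=ChatCompletionResponseEvent(
            event_type=ChatCompletionResponseEventType.complete,
            delta=TextDelta(text=""),
        )
    )


async def main():
    async for chunk in stream_generator():
        print(chunk.event.event_type.value, repr(chunk.event.delta.text))


if __name__ == "__main__":
    asyncio.run(main())
```

Running the sketch prints one `progress` line per token followed by a single trailing `complete` line, which is the stream shape the dedent in the diff produces.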