Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-08-03 09:21:45 +00:00
Fix stream generate
parent 9c83ca415d
commit 1a5cfd1b6f

1 changed file with 5 additions and 5 deletions
@@ -116,12 +116,12 @@ class LMStudioClient:
                            delta=TextDelta(text=chunk.content),
                        )
                    )
                yield ChatCompletionResponseStreamChunk(
                    event=ChatCompletionResponseEvent(
                        event_type=ChatCompletionResponseEventType.complete,
                        delta=TextDelta(text=""),
                    )
                )

            return stream_generator()
        else:
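For context, the hunk sits in LMStudioClient's streaming path, which yields one progress chunk per fragment of model output and then a final complete event with an empty delta before handing the generator back to the caller. The sketch below reconstructs that flow with hypothetical stand-in types so it runs on its own; the real ChatCompletionResponseStreamChunk, ChatCompletionResponseEvent, ChatCompletionResponseEventType, and TextDelta come from llama-stack's inference API, and the prediction_stream parameter, the start event, and the fake driver code are assumptions for illustration, not code from this commit.

# Minimal, self-contained sketch of the streaming flow touched by this commit.
# The types below are hypothetical stand-ins for llama-stack classes of the
# same names; only the event sequence is meant to match the hunk above.
import asyncio
from dataclasses import dataclass
from enum import Enum
from typing import AsyncIterator


class ChatCompletionResponseEventType(Enum):
    start = "start"
    progress = "progress"
    complete = "complete"


@dataclass
class TextDelta:
    text: str


@dataclass
class ChatCompletionResponseEvent:
    event_type: ChatCompletionResponseEventType
    delta: TextDelta


@dataclass
class ChatCompletionResponseStreamChunk:
    event: ChatCompletionResponseEvent


async def stream_generator(prediction_stream) -> AsyncIterator[ChatCompletionResponseStreamChunk]:
    # Assumed start event; the hunk itself only shows the progress and complete parts.
    yield ChatCompletionResponseStreamChunk(
        event=ChatCompletionResponseEvent(
            event_type=ChatCompletionResponseEventType.start,
            delta=TextDelta(text=""),
        )
    )
    # One progress chunk per fragment from the underlying prediction stream,
    # carrying the fragment text in the delta (chunk.content in the hunk).
    async for chunk in prediction_stream:
        yield ChatCompletionResponseStreamChunk(
            event=ChatCompletionResponseEvent(
                event_type=ChatCompletionResponseEventType.progress,
                delta=TextDelta(text=chunk.content),
            )
        )
    # Once the stream is exhausted, emit a single complete event with an empty
    # delta (the block shown in the hunk) so consumers know the response ended.
    yield ChatCompletionResponseStreamChunk(
        event=ChatCompletionResponseEvent(
            event_type=ChatCompletionResponseEventType.complete,
            delta=TextDelta(text=""),
        )
    )


@dataclass
class FakeFragment:
    content: str


async def fake_prediction_stream():
    # Stand-in for the model's streamed output fragments.
    for piece in ["Hello", ", ", "world"]:
        yield FakeFragment(content=piece)


async def main():
    async for stream_chunk in stream_generator(fake_prediction_stream()):
        print(stream_chunk.event.event_type.value, repr(stream_chunk.event.delta.text))


if __name__ == "__main__":
    asyncio.run(main())

Running the sketch prints a start event, one progress event per fragment, and a trailing complete event, which is the sequence the changed lines above are responsible for closing out.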