Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-31 11:43:55 +00:00)
chore: mypy for remote::ollama
Signed-off-by: Ihar Hrachyshka <ihar.hrachyshka@gmail.com>
Commit d95b92571b (parent 3e6c47ce10)
3 changed files with 28 additions and 19 deletions
@@ -300,7 +300,7 @@ def process_chat_completion_response(
 
 async def process_completion_stream_response(
     stream: AsyncGenerator[OpenAICompatCompletionResponse, None],
-) -> AsyncGenerator:
+) -> AsyncGenerator[CompletionResponseStreamChunk, None]:
     stop_reason = None
 
     async for chunk in stream:
@@ -337,7 +337,7 @@ async def process_completion_stream_response(
 async def process_chat_completion_stream_response(
     stream: AsyncGenerator[OpenAICompatCompletionResponse, None],
     request: ChatCompletionRequest,
-) -> AsyncGenerator:
+) -> AsyncGenerator[ChatCompletionResponseStreamChunk]:
     yield ChatCompletionResponseStreamChunk(
         event=ChatCompletionResponseEvent(
             event_type=ChatCompletionResponseEventType.start,