Remove unnecessary type ignore using Sequence covariance
parent ec1bae78e6
commit 6fb7b9521f

1 changed file with 1 addition and 2 deletions
@@ -72,8 +72,7 @@ async def convert_chat_choice_to_response_message(
     return OpenAIResponseMessage(
         id=message_id or f"msg_{uuid.uuid4()}",
-        # List invariance: annotations is list of specific type, but parameter expects union
-        content=[OpenAIResponseOutputMessageContentOutputText(text=clean_text, annotations=list(annotations))],  # type: ignore[arg-type]
+        content=[OpenAIResponseOutputMessageContentOutputText(text=clean_text, annotations=list(annotations))],
         status="completed",
         role="assistant",
     )
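For context, a minimal sketch of the typing behavior the commit title refers to, using hypothetical annotation classes rather than the real llama-stack types: list is invariant in its element type, so a list of a specific citation type does not satisfy a parameter typed as a list of a union and needs a type: ignore[arg-type], while Sequence is covariant, so the same argument satisfies a Sequence of the union with no ignore.

# Hypothetical sketch (not the actual llama-stack classes) of why switching the
# parameter annotation from list[...] to Sequence[...] removes the need for
# a `# type: ignore[arg-type]` comment.
from collections.abc import Sequence
from dataclasses import dataclass, field
from typing import Union


@dataclass
class UrlCitation:
    url: str


@dataclass
class FileCitation:
    file_id: str


Annotation = Union[UrlCitation, FileCitation]


@dataclass
class OutputTextInvariant:
    text: str
    # list is invariant: list[UrlCitation] is NOT a list[Annotation], so passing
    # one here fails mypy without a type: ignore[arg-type].
    annotations: list[Annotation] = field(default_factory=list)


@dataclass
class OutputTextCovariant:
    text: str
    # Sequence is covariant: list[UrlCitation] IS a Sequence[Annotation], so the
    # same call type-checks cleanly.
    annotations: Sequence[Annotation] = ()


citations: list[UrlCitation] = [UrlCitation(url="https://example.com")]

# mypy reports arg-type without the ignore, because of list invariance.
invariant = OutputTextInvariant(text="hi", annotations=citations)  # type: ignore[arg-type]

# mypy accepts this with no ignore, thanks to Sequence covariance.
covariant = OutputTextCovariant(text="hi", annotations=list(citations))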