Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-10-04 04:04:14 +00:00)
chore: enable mypy type checking for sentence transformers
Fix method signature incompatibilities with InferenceProvider protocol:
- Update completion() content parameter type to InterleavedContent
- Reorder chat_completion() parameters to match protocol
- Add type ignores for mixin inheritance conflicts

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
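For context, here is a minimal sketch of the kind of alignment the commit message describes. The InterleavedContent alias, the InferenceProvider protocol body, and the _LegacyCompletionMixin below are simplified, hypothetical stand-ins rather than the real llama-stack definitions; only the general pattern (matching the protocol's parameter types and order, plus a targeted type ignore for a conflicting mixin) reflects the commit.

# A minimal sketch, assuming simplified stand-ins: these are NOT the real
# llama-stack definitions; they only illustrate the signature alignment and
# targeted "type: ignore" the commit message describes.
from typing import Protocol, Union

# Stand-in for llama_stack's InterleavedContent union type (hypothetical shape).
InterleavedContent = Union[str, list[str]]


class InferenceProvider(Protocol):
    # The protocol types `content` as InterleavedContent rather than plain str.
    async def completion(self, model_id: str, content: InterleavedContent) -> str: ...

    # The protocol also fixes a parameter order that implementations must match.
    async def chat_completion(
        self, model_id: str, messages: list[str], stream: bool = False
    ) -> str: ...


class _LegacyCompletionMixin:
    # Hypothetical mixin whose older signature disagrees with the protocol.
    async def completion(self, request: str) -> str:
        return "..."


class SentenceTransformersInferenceImpl(_LegacyCompletionMixin):
    # Override with the protocol-compatible signature; the targeted ignore mirrors
    # the "type ignores for mixin inheritance conflicts" from the commit message.
    async def completion(  # type: ignore[override]
        self, model_id: str, content: InterleavedContent
    ) -> str:
        return "..."

    async def chat_completion(
        self, model_id: str, messages: list[str], stream: bool = False
    ) -> str:
        return "..."


# Structural check: mypy verifies the implementation satisfies the protocol.
_provider: InferenceProvider = SentenceTransformersInferenceImpl()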
parent ed418653ec
commit 821d09af3d
3 changed files with 6 additions and 6 deletions
pyproject.toml
@@ -268,7 +268,6 @@ exclude = [
     "^llama_stack/models/llama/llama3/multimodal/model\\.py$",
     "^llama_stack/models/llama/llama4/",
     "^llama_stack/providers/inline/inference/meta_reference/quantization/fp8_impls\\.py$",
-    "^llama_stack/providers/inline/inference/sentence_transformers/sentence_transformers\\.py$",
     "^llama_stack/providers/inline/post_training/common/validator\\.py$",
     "^llama_stack/providers/inline/safety/code_scanner/",
     "^llama_stack/providers/inline/safety/llama_guard/",
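With the sentence_transformers exclusion dropped from the mypy exclude list above, that module now falls within the type-checked set. As a rough illustration (assuming mypy is installed and this is run from the repository root so the pyproject.toml configuration is picked up), the file can be checked programmatically through mypy's Python API:

from mypy import api

# Run mypy on the file that is no longer excluded; api.run returns
# a (stdout, stderr, exit_status) tuple.
stdout, stderr, exit_status = api.run(
    ["llama_stack/providers/inline/inference/sentence_transformers/sentence_transformers.py"]
)
print(stdout or stderr)
print("mypy exit status:", exit_status)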