Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-23 04:49:40 +00:00)
chore(api): add mypy coverage to interface
Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
parent 51b179e1c5
commit c67097ffeb
3 changed files with 11 additions and 10 deletions
@@ -171,7 +171,7 @@ class Tokenizer:
             str: The decoded string.
         """
         # Typecast is safe here. Tiktoken doesn't do anything list-related with the sequence.
-        return self.model.decode(cast(list[int], t))
+        return cast(str, self.model.decode(cast(list[int], t)))
 
     @staticmethod
     def _split_whitespaces_or_nonwhitespaces(s: str, max_consecutive_slice_len: int) -> Iterator[str]:
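For context, a minimal, self-contained sketch (not repository code) of the pattern this hunk applies: when mypy sees a third-party call as returning Any, wrapping the result in typing.cast gives the annotated function a concrete return type without changing runtime behavior. The untyped_decode helper and the warn_return_any setting below are illustrative assumptions, not taken from the commit.

# Hypothetical illustration of the cast-to-satisfy-mypy pattern shown in the diff above.
from typing import Any, cast


def untyped_decode(tokens: list[int]) -> Any:
    # Stand-in for a third-party method whose return type mypy treats as Any.
    return "".join(chr(t) for t in tokens)


def decode(tokens: list[int]) -> str:
    # Without the cast, mypy with warn_return_any enabled reports:
    #   Returning Any from function declared to return "str"
    # cast() is a no-op at runtime; it only narrows the type for the checker.
    return cast(str, untyped_decode(tokens))


print(decode([104, 105]))  # prints "hi"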