chore(pre-commit): add pre-commit hook to enforce llama_stack logger usage

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>

commit 8d85f8a8f6
parent 5f6d5072b6

57 changed files with 148 additions and 122 deletions
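The hook itself is not part of this excerpt. As a rough sketch, a check of the kind the commit title describes could be a small script that pre-commit runs over staged Python files; the script name, forbidden patterns, and error message below are assumptions, not the repository's actual hook:

#!/usr/bin/env python3
# Hypothetical sketch of a logger-enforcement check that a pre-commit hook
# could run. It fails the commit if a staged file uses stdlib logging
# directly instead of the project-wide llama_stack.log.get_logger.
import re
import sys

# Assumption: these are the constructs the hook is meant to forbid.
FORBIDDEN = re.compile(
    r"^\s*(?:from logging import getLogger|logger\s*=\s*getLogger\()",
    re.MULTILINE,
)

def main(paths: list[str]) -> int:
    offenders = []
    for path in paths:
        with open(path, encoding="utf-8") as f:
            if FORBIDDEN.search(f.read()):
                offenders.append(path)
    for path in offenders:
        print(f"{path}: use llama_stack.log.get_logger instead of logging.getLogger")
    return 1 if offenders else 0

if __name__ == "__main__":
    # pre-commit passes the staged file paths as arguments
    sys.exit(main(sys.argv[1:]))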
@@ -5,7 +5,6 @@
 # the root directory of this source tree.

 from collections.abc import Collection, Iterator, Sequence, Set
-from logging import getLogger
 from pathlib import Path
 from typing import (
     Literal,
@@ -14,11 +13,9 @@ from typing import (

 import tiktoken

+from llama_stack.log import get_logger
 from llama_stack.models.llama.tokenizer_utils import load_bpe_file

-logger = getLogger(__name__)
-
-
 # The tiktoken tokenizer can handle <=400k chars without
 # pyo3_runtime.PanicException.
 TIKTOKEN_MAX_ENCODE_CHARS = 400_000
@@ -101,6 +98,8 @@ BASIC_SPECIAL_TOKENS = [
     "<|fim_suffix|>",
 ]

+logger = get_logger(name=__name__, category="models::llama")
+

 class Tokenizer:
     """
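Across the 57 changed files the migration follows the same before/after pattern visible in the hunks above; the category string presumably varies by module, with "models::llama" being the one shown here:

# Before: module-local stdlib logger
from logging import getLogger
logger = getLogger(__name__)

# After: project-wide logger with an explicit category, as in the diff above
from llama_stack.log import get_logger
logger = get_logger(name=__name__, category="models::llama")

# Call sites are unchanged by the migration (hypothetical example):
logger.info("loaded tokenizer model")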