Mirror of https://github.com/meta-llama/llama-stack.git
fix: logger usage

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>

parent 313d0d809b
commit 5115801835

49 changed files with 118 additions and 98 deletions
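The change applied across these files is mechanical and identical everywhere: drop the stdlib logging logger and construct one through llama_stack.log.get_logger instead, tagging it with a category. A minimal before/after sketch of the pattern (the surrounding module is illustrative, not one of the 49 changed files):

    # Before: each module builds its own stdlib logger
    import logging

    log = logging.getLogger(__name__)

    # After: the project-wide helper, with an explicit category
    from llama_stack.log import get_logger

    log = get_logger(name=__name__, category="core")

The hunks below show this substitution in two of the touched modules.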
@@ -4,7 +4,6 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
 
-import logging
 import os
 from collections.abc import Callable
 
@@ -13,11 +12,13 @@ from fairscale.nn.model_parallel.initialize import get_model_parallel_rank
 from torch import Tensor, nn
 from torch.nn import functional as F
 
+from llama_stack.log import get_logger
+
 from ...datatypes import QuantizationMode
 from ..model import Transformer, TransformerBlock
 from ..moe import MoE
 
-log = logging.getLogger(__name__)
+log = get_logger(name=__name__, category="core")
 
 
 def swiglu_wrapper_no_reduce(
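llama_stack/log.py itself is not part of the hunks shown here, so the helper's internals are not visible in this excerpt. As a hypothetical sketch only, assuming get_logger namespaces a stdlib logger by category so verbosity can be tuned per category (the real implementation may differ):

    import logging

    def get_logger(name: str, category: str = "uncategorized") -> logging.Logger:
        # Hypothetical: nest the logger under its category so a single
        # configuration call can set the level for a whole category at once.
        return logging.getLogger(f"llama_stack.{category}.{name}")

The remaining hunks apply the same substitution in the tokenizer module.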
@@ -5,7 +5,6 @@
 # the root directory of this source tree.
 
 from collections.abc import Collection, Iterator, Sequence, Set
-from logging import getLogger
 from pathlib import Path
 from typing import (
     Literal,
@@ -14,9 +13,10 @@ from typing import (
 
 import tiktoken
 
+from llama_stack.log import get_logger
 from llama_stack.models.llama.tokenizer_utils import load_bpe_file
 
-logger = getLogger(__name__)
+logger = get_logger(name=__name__, category="core")
 
 
 # The tiktoken tokenizer can handle <=400k chars without
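A practical payoff of the migration, and presumably the motivation for the fix: once every module goes through get_logger, logging can be configured in one place by category instead of per module. A small usage sketch of the migrated tokenizer logger, assuming the returned object behaves like a stdlib logging.Logger (the methods used below are standard logging API):

    import logging

    from llama_stack.log import get_logger

    # Assumption: get_logger returns or wraps a stdlib logger, so the
    # usual configuration knobs still apply.
    logging.basicConfig(level=logging.INFO)

    logger = get_logger(name=__name__, category="core")
    logger.info("tokenizer ready")  # illustrative message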