fix: logger usage

Signed-off-by: Mustafa Elbehery <melbeher@redhat.com>
Author: Mustafa Elbehery
Date: 2025-07-23 14:35:15 +02:00
parent 313d0d809b
commit 5115801835
49 changed files with 118 additions and 98 deletions

@@ -6,7 +6,6 @@
 import gc
 import json
-import logging
 import multiprocessing
 import os
 import signal
@@ -17,6 +16,7 @@ from typing import Any
 import psutil
+from llama_stack.log import get_logger
 from llama_stack.providers.inline.post_training.common.utils import evacuate_model_from_device
 # Set tokenizer parallelism environment variable
@@ -48,7 +48,7 @@ from llama_stack.apis.post_training import (
 from ..config import HuggingFacePostTrainingConfig
-logger = logging.getLogger(__name__)
+logger = get_logger(name=__name__, category="core")
 def get_gb(to_convert: int) -> str:
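
The hunks above are from the HuggingFace post-training recipe: the stdlib `logging` import is dropped, `get_logger` is imported from `llama_stack.log`, and the module-level logger is created through it with an explicit category. Below is a minimal sketch of the resulting idiom, assuming the object returned by `get_logger` exposes the standard `logging.Logger` interface; only the import and the `name`/`category` call signature are taken from the diff, and the function shown is purely illustrative.

# Sketch of the module-level logger idiom this commit standardizes on.
# Assumes llama_stack is installed and that get_logger() returns a
# stdlib-compatible logger; the "core" category comes from the diff.
from llama_stack.log import get_logger

logger = get_logger(name=__name__, category="core")


def cleanup_step() -> None:
    # Hypothetical function, used only to show call sites: existing
    # logger.info()/logger.warning() calls keep working after the swap.
    logger.info("starting cleanup")
    logger.warning("cleanup took longer than expected")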

@@ -4,7 +4,6 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
-import logging
 import os
 import time
 from datetime import UTC, datetime
@@ -45,6 +44,7 @@ from llama_stack.apis.post_training import (
 )
 from llama_stack.distribution.utils.config_dirs import DEFAULT_CHECKPOINT_DIR
 from llama_stack.distribution.utils.model_utils import model_local_dir
+from llama_stack.log import get_logger
 from llama_stack.models.llama.sku_list import resolve_model
 from llama_stack.providers.inline.post_training.common.utils import evacuate_model_from_device
 from llama_stack.providers.inline.post_training.torchtune.common import utils
@@ -56,7 +56,7 @@ from llama_stack.providers.inline.post_training.torchtune.config import (
 )
 from llama_stack.providers.inline.post_training.torchtune.datasets.sft import SFTDataset
-log = logging.getLogger(__name__)
+log = get_logger(name=__name__, category="core")
 from torchtune.models.llama3._tokenizer import Llama3Tokenizer
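
The torchtune recipe gets the same treatment, with its module-level `log` now created via `get_logger` instead of `logging.getLogger`. For readers without the codebase at hand, the sketch below is a deliberately simplified, hypothetical stand-in for `llama_stack.log.get_logger`, not the real implementation; it only illustrates one way a name/category pair can be mapped onto stdlib logging.

import logging


def get_logger(name: str, category: str = "uncategorized") -> logging.Logger:
    # Hypothetical stand-in: qualify the logger name with its category so
    # levels and handlers can be tuned per category (e.g. "core").
    return logging.getLogger(f"{category}.{name}")


# Mirrors the pattern from the diff hunks above.
log = get_logger(name=__name__, category="core")
log.debug("tokenizer loaded")

Namespacing loggers by category keeps per-subsystem verbosity configurable from one place, which is presumably why the commit routes every module through the shared helper rather than calling `logging.getLogger` directly.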