Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-05 18:22:41 +00:00)
refine
This commit is contained in:
parent: a3b1c3438b
commit: 4c2b05c6b2
1 changed file with 0 additions and 18 deletions
@@ -14,24 +14,6 @@ from typing import Any, Dict, List, Optional, Tuple
 import torch
 from llama_models.sku_list import resolve_model
-from torch import nn
-from torch.optim import Optimizer
-from torch.utils.data import DataLoader, DistributedSampler
-from torchtune import modules, training, utils as torchtune_utils
-from torchtune.data import padded_collate_sft
-
-from torchtune.modules.loss import CEWithChunkedOutputLoss
-from torchtune.modules.peft import (
-    get_adapter_params,
-    get_adapter_state_dict,
-    get_lora_module_names,
-    get_merged_lora_ckpt,
-    set_trainable_params,
-    validate_missing_and_unexpected_for_lora,
-)
-from torchtune.training.lr_schedulers import get_cosine_schedule_with_warmup
-from torchtune.training.metric_logging import DiskLogger
-from tqdm import tqdm
 
 from llama_stack.apis.common.training_types import PostTrainingMetric
 from llama_stack.apis.datasetio import DatasetIO
 
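
For reference, a reconstruction of the import block as it reads after this commit, built only from the hunk above (the rest of the file is not shown here, and the changed file's path is not visible in this view); it assumes the torch, llama_models, and llama_stack packages are installed:

# Imports remaining at the top of the changed file after this commit,
# reconstructed from the hunk above. Since the diff has 0 additions and
# 18 deletions, the torchtune/torch imports were dropped outright rather
# than moved within this file.
from typing import Any, Dict, List, Optional, Tuple

import torch
from llama_models.sku_list import resolve_model

from llama_stack.apis.common.training_types import PostTrainingMetric
from llama_stack.apis.datasetio import DatasetIO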