from models.llama3_1 --> from llama_models.llama3_1

This commit is contained in:
Hardik Shah 2024-07-21 19:07:02 -07:00
parent c6ef16f6bd
commit c64b8cba22
22 changed files with 29 additions and 27 deletions

View file

@@ -6,7 +6,7 @@ from pydantic import BaseModel, Field
from strong_typing.schema import json_schema_type
from typing_extensions import Annotated
-from models.llama3_1.api.datatypes import * # noqa: F403
+from llama_models.llama3_1.api.datatypes import * # noqa: F403
class LogProbConfig(BaseModel):

View file

@@ -16,11 +16,11 @@ from fairscale.nn.model_parallel.initialize import (
initialize_model_parallel,
model_parallel_is_initialized,
)
-from models.llama3_1.api.args import ModelArgs
-from models.llama3_1.api.chat_format import ChatFormat, ModelInput
-from models.llama3_1.api.datatypes import Message
-from models.llama3_1.api.model import Transformer
-from models.llama3_1.api.tokenizer import Tokenizer
+from llama_models.llama3_1.api.args import ModelArgs
+from llama_models.llama3_1.api.chat_format import ChatFormat, ModelInput
+from llama_models.llama3_1.api.datatypes import Message
+from llama_models.llama3_1.api.model import Transformer
+from llama_models.llama3_1.api.tokenizer import Tokenizer
from termcolor import cprint
from .api.config import CheckpointType, InlineImplConfig

View file

@@ -1,6 +1,6 @@
from typing import AsyncGenerator
-from models.llama3_1.api.datatypes import StopReason
+from llama_models.llama3_1.api.datatypes import StopReason
from .api.config import (
CheckpointQuantizationFormat,

View file

@@ -2,9 +2,9 @@ from dataclasses import dataclass
from functools import partial
from typing import Generator, List, Optional
-from models.llama3_1.api.chat_format import ChatFormat
-from models.llama3_1.api.datatypes import Message
-from models.llama3_1.api.tokenizer import Tokenizer
+from llama_models.llama3_1.api.chat_format import ChatFormat
+from llama_models.llama3_1.api.datatypes import Message
+from llama_models.llama3_1.api.tokenizer import Tokenizer
from .api.config import InlineImplConfig
from .generation import Llama

View file

@@ -7,7 +7,7 @@ from typing import Optional
import torch
from fairscale.nn.model_parallel.mappings import reduce_from_model_parallel_region
-from models.llama3_1.api.model import Transformer, TransformerBlock
+from llama_models.llama3_1.api.model import Transformer, TransformerBlock
from termcolor import cprint