Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-11 04:28:02 +00:00)
fix lint

Signed-off-by: Yuan Tang <terrytangyuan@gmail.com>

commit b1b1dbc41e
parent bfb842ed4d

7 changed files with 5 additions and 7 deletions
@@ -13,8 +13,8 @@ from typing import (
     Literal,
     Optional,
     Protocol,
-    runtime_checkable,
     Union,
+    runtime_checkable,
 )
 
 from llama_models.llama3.api.datatypes import Primitive

@@ -8,7 +8,6 @@ from typing import Dict
 
 from llama_stack.providers.datatypes import Api, ProviderSpec
 
-
 from .config import FaissVectorIOConfig
 
 

@@ -21,7 +21,6 @@ from llama_stack.providers.utils.memory.vector_store import (
     VectorDBWithIndex,
 )
 
-
 from .config import QdrantVectorIOConfig
 
 log = logging.getLogger(__name__)

@@ -8,7 +8,7 @@ from typing import Dict
 
 from llama_stack.providers.datatypes import Api, ProviderSpec
 
-from .config import WeaviateVectorIOConfig, WeaviateRequestProviderData  # noqa: F401
+from .config import WeaviateRequestProviderData, WeaviateVectorIOConfig  # noqa: F401
 
 
 async def get_adapter_impl(config: WeaviateVectorIOConfig, deps: Dict[Api, ProviderSpec]):

@@ -23,7 +23,7 @@ from llama_stack.providers.utils.memory.vector_store import (
     VectorDBWithIndex,
 )
 
-from .config import WeaviateVectorIOConfig, WeaviateRequestProviderData
+from .config import WeaviateRequestProviderData, WeaviateVectorIOConfig
 
 log = logging.getLogger(__name__)
 

@@ -12,9 +12,9 @@ import pytest_asyncio
 
 from llama_stack.apis.models import ModelInput, ModelType
 from llama_stack.distribution.datatypes import Api, Provider
-from llama_stack.providers.inline.vector_io.sqlite_vec import SQLiteVectorIOConfig
 from llama_stack.providers.inline.vector_io.chroma import ChromaVectorIOConfig as InlineChromaVectorIOConfig
 from llama_stack.providers.inline.vector_io.faiss import FaissVectorIOConfig
+from llama_stack.providers.inline.vector_io.sqlite_vec import SQLiteVectorIOConfig
 from llama_stack.providers.remote.vector_io.chroma import ChromaVectorIOConfig
 from llama_stack.providers.remote.vector_io.pgvector import PGVectorVectorIOConfig
 from llama_stack.providers.remote.vector_io.weaviate import WeaviateVectorIOConfig

@@ -16,8 +16,8 @@ from llama_stack.distribution.datatypes import (
 from llama_stack.providers.inline.inference.sentence_transformers import (
     SentenceTransformersInferenceConfig,
 )
-from llama_stack.providers.inline.vector_io.sqlite_vec.config import SQLiteVectorIOConfig
 from llama_stack.providers.inline.vector_io.faiss.config import FaissVectorIOConfig
+from llama_stack.providers.inline.vector_io.sqlite_vec.config import SQLiteVectorIOConfig
 from llama_stack.providers.remote.inference.ollama import OllamaImplConfig
 from llama_stack.templates.template import DistributionTemplate, RunConfigSettings
 
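
The changes are purely import hygiene: runtime_checkable and the sqlite_vec config imports move to where a sorted import block places them, the Weaviate "from .config" imports are alphabetized, and duplicate blank lines between import groups are dropped. The new position of runtime_checkable after Union is consistent with a plain ASCII (case-sensitive) ordering, in which uppercase-initial names sort before lowercase-initial ones; a minimal standalone sketch of that ordering (illustrative only, not code from the repository):

# A plain ASCII sort puts uppercase-initial names ahead of lowercase-initial
# ones, which is why runtime_checkable lands after Union in the fixed import.
names = ["Literal", "Optional", "Protocol", "runtime_checkable", "Union"]
print(sorted(names))
# -> ['Literal', 'Optional', 'Protocol', 'Union', 'runtime_checkable']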