Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-08-06 02:32:40 +00:00)
Moving preprocessors.py to a separate directory.

commit 2008cd7921
parent 47e5ae682b

28 changed files with 35 additions and 26 deletions
docs/_static/llama-stack-spec.html (vendored, 1 change)

@@ -7210,6 +7210,7 @@
   "preprocessor"
 ],
 "title": "ResourceType",
+"const": "preprocessor",
 "default": "preprocessor"
 },
 "metadata": {
docs/_static/llama-stack-spec.yaml (vendored, 1 change)

@@ -5033,6 +5033,7 @@ components:
 - tool_group
 - preprocessor
 title: ResourceType
+const: preprocessor
 default: preprocessor
 metadata:
   type: object
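Both spec changes add a const alongside the existing default for the preprocessor resource type. The two keywords do different jobs: default only documents the value to assume when the field is omitted, while const pins the field to exactly that value during validation. A small illustrative sketch using the third-party jsonschema package (not part of this commit):

# Illustrative only: what the added `const` enforces vs. the pre-existing `default`.
from jsonschema import ValidationError, validate

resource_type_schema = {
    "type": "string",
    "const": "preprocessor",    # added by this commit's spec regeneration
    "default": "preprocessor",  # was already present
}

validate("preprocessor", resource_type_schema)  # accepted

try:
    validate("model", resource_type_schema)     # now rejected: const pins the value
except ValidationError:
    print("non-preprocessor resource type rejected")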
@@ -9,7 +9,7 @@ from typing import Any, Dict, List, Optional, Protocol, runtime_checkable
 from pydantic import BaseModel

 from llama_stack.apis.common.content_types import URL, InterleavedContent
-from llama_stack.apis.preprocessing.preprocessors import Preprocessor
+from llama_stack.apis.preprocessors.preprocessors import Preprocessor
 from llama_stack.apis.vector_io import Chunk
 from llama_stack.schema_utils import json_schema_type, webmethod

llama_stack/apis/preprocessors/__init__.py (new file, 7 additions)

@@ -0,0 +1,7 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from .preprocessors import *  # noqa: F401 F403
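The wildcard re-export makes the new package importable at both levels. A sketch of the intended usage (assuming Preprocessor is defined in preprocessors.py under the new directory, as the import changes throughout this commit indicate):

# Sketch of the import surface created by the new __init__.py (not code from this commit).
# Assumes Preprocessor is defined in llama_stack/apis/preprocessors/preprocessors.py.

# Direct module path, as used by the files touched in this commit:
from llama_stack.apis.preprocessors.preprocessors import Preprocessor

# Equivalent package-level import, enabled by the wildcard re-export:
from llama_stack.apis.preprocessors import Preprocessor as PreprocessorViaPackage

assert Preprocessor is PreprocessorViaPackage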
@@ -4,7 +4,7 @@
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.

-from typing import Any, Dict, List, Optional, Protocol, runtime_checkable
+from typing import Any, Dict, List, Literal, Optional, Protocol, runtime_checkable

 from pydantic import BaseModel


@@ -15,7 +15,7 @@ from llama_stack.schema_utils import json_schema_type, webmethod

 @json_schema_type
 class Preprocessor(Resource):
-    type: ResourceType = ResourceType.preprocessor
+    type: Literal[ResourceType.preprocessor] = ResourceType.preprocessor

     @property
     def preprocessor_id(self) -> str:
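Narrowing type from the bare ResourceType annotation to a single-value Literal is what produces the const: preprocessor entries in the regenerated spec above, and it lets pydantic tell resource kinds apart by the type field alone. A minimal, self-contained sketch of the pattern (plain pydantic v2 with string literals standing in for llama-stack's ResourceType enum; not code from this commit):

from typing import Annotated, Literal, Union

from pydantic import BaseModel, Field, TypeAdapter


class Preprocessor(BaseModel):
    # Single-value Literal: serializes to `const` in the generated JSON schema.
    type: Literal["preprocessor"] = "preprocessor"
    identifier: str


class Model(BaseModel):
    type: Literal["model"] = "model"
    identifier: str


# The Literal field can now act as a discriminator across resource kinds.
Resource = Annotated[Union[Preprocessor, Model], Field(discriminator="type")]

# `const` shows up in the schema, mirroring the llama-stack-spec diffs above.
assert Preprocessor.model_json_schema()["properties"]["type"]["const"] == "preprocessor"

# Parsing picks the right class from the "type" field alone.
parsed = TypeAdapter(Resource).validate_python({"type": "preprocessor", "identifier": "basic"})
assert isinstance(parsed, Preprocessor)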
@@ -15,7 +15,7 @@ from llama_stack.apis.eval import Eval
 from llama_stack.apis.inference import Inference
 from llama_stack.apis.models import Model, ModelInput
 from llama_stack.apis.preprocessing import Preprocessing, Preprocessor
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.apis.resource import Resource
 from llama_stack.apis.safety import Safety
 from llama_stack.apis.scoring import Scoring

@@ -18,7 +18,7 @@ from llama_stack.apis.inspect import Inspect
 from llama_stack.apis.models import Models
 from llama_stack.apis.post_training import PostTraining
 from llama_stack.apis.preprocessing import Preprocessing
-from llama_stack.apis.preprocessing.preprocessors import Preprocessors
+from llama_stack.apis.preprocessors.preprocessors import Preprocessors
 from llama_stack.apis.providers import Providers as ProvidersAPI
 from llama_stack.apis.safety import Safety
 from llama_stack.apis.scoring import Scoring

@@ -24,7 +24,7 @@ from llama_stack.apis.datasets import (
     URIDataSource,
 )
 from llama_stack.apis.models import ListModelsResponse, Model, Models, ModelType
-from llama_stack.apis.preprocessing.preprocessors import ListPreprocessorsResponse, Preprocessor, Preprocessors
+from llama_stack.apis.preprocessors.preprocessors import ListPreprocessorsResponse, Preprocessor, Preprocessors
 from llama_stack.apis.resource import ResourceType
 from llama_stack.apis.scoring_functions import (
     ListScoringFunctionsResponse,

@@ -24,7 +24,7 @@ from llama_stack.apis.inspect import Inspect
 from llama_stack.apis.models import Models
 from llama_stack.apis.post_training import PostTraining
 from llama_stack.apis.preprocessing import Preprocessing
-from llama_stack.apis.preprocessing.preprocessors import Preprocessors
+from llama_stack.apis.preprocessors.preprocessors import Preprocessors
 from llama_stack.apis.providers import Providers
 from llama_stack.apis.safety import Safety
 from llama_stack.apis.scoring import Scoring
@@ -6,7 +6,7 @@

 from pathlib import Path

-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import Provider, ToolGroupInput
 from llama_stack.providers.inline.vector_io.faiss.config import FaissVectorIOConfig
 from llama_stack.providers.remote.inference.bedrock.models import MODEL_ENTRIES

@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import ModelInput, Provider, ToolGroupInput
 from llama_stack.providers.inline.inference.sentence_transformers import (
     SentenceTransformersInferenceConfig,

@@ -6,7 +6,7 @@


 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -5,7 +5,7 @@
 # the root directory of this source tree.

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,
@@ -7,7 +7,7 @@
 from typing import List, Tuple

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import ModelInput, Provider, ToolGroupInput
 from llama_stack.providers.inline.inference.sentence_transformers import (
     SentenceTransformersInferenceConfig,

@@ -5,7 +5,7 @@
 # the root directory of this source tree.

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,
@@ -5,7 +5,7 @@
 # the root directory of this source tree.

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import ModelInput, Provider, ToolGroupInput
 from llama_stack.providers.inline.inference.meta_reference import (
     MetaReferenceQuantizedInferenceConfig,

@@ -6,7 +6,7 @@

 from pathlib import Path

-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import ModelInput, Provider, ShieldInput, ToolGroupInput
 from llama_stack.providers.remote.inference.nvidia import NVIDIAConfig
 from llama_stack.providers.remote.inference.nvidia.models import MODEL_ENTRIES
@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -6,7 +6,7 @@

 from pathlib import Path

-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import Provider, ShieldInput, ToolGroupInput
 from llama_stack.providers.inline.vector_io.faiss.config import FaissVectorIOConfig
 from llama_stack.providers.remote.inference.sambanova import SambaNovaImplConfig

@@ -13,7 +13,7 @@ from pydantic import BaseModel, Field

 from llama_stack.apis.datasets import DatasetPurpose
 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     Api,
     BenchmarkInput,
@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -7,7 +7,7 @@
 from pathlib import Path

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import (
     ModelInput,
     Provider,

@@ -5,7 +5,7 @@
 # the root directory of this source tree.

 from llama_stack.apis.models.models import ModelType
-from llama_stack.apis.preprocessing.preprocessors import PreprocessorInput
+from llama_stack.apis.preprocessors.preprocessors import PreprocessorInput
 from llama_stack.distribution.datatypes import ModelInput, Provider
 from llama_stack.providers.inline.inference.sentence_transformers import (
     SentenceTransformersInferenceConfig,