Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-23 02:52:25 +00:00)
disable posttraining
# What does this PR do?

Disables post-training in the starter distribution: the post_training API and its inline::huggingface provider are removed from the build spec, the run config, and the distribution template.

## Test Plan
parent b3d86ca926
commit 71effe5202
3 changed files with 0 additions and 18 deletions
File 1 of 3 (build spec):

@@ -42,8 +42,6 @@ distribution_spec:
     - inline::meta-reference
     telemetry:
     - inline::meta-reference
-    post_training:
-    - inline::huggingface
     eval:
     - inline::meta-reference
     datasetio:
File 2 of 3 (run config):

@@ -6,7 +6,6 @@ apis:
 - eval
 - files
 - inference
-- post_training
 - safety
 - scoring
 - telemetry
@@ -200,13 +199,6 @@ providers:
       sinks: ${env.TELEMETRY_SINKS:=console,sqlite}
       sqlite_db_path: ${env.SQLITE_STORE_DIR:=~/.llama/distributions/starter}/trace_store.db
       otel_exporter_otlp_endpoint: ${env.OTEL_EXPORTER_OTLP_ENDPOINT:=}
-  post_training:
-  - provider_id: huggingface
-    provider_type: inline::huggingface
-    config:
-      checkpoint_format: huggingface
-      distributed_backend: null
-      device: cpu
   eval:
   - provider_id: meta-reference
     provider_type: inline::meta-reference
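For anyone updating a local starter config by hand, a quick structural check like the one below confirms the change. It is only a sketch: the helper name and file path are assumptions, and it relies on nothing beyond the top-level apis list and providers mapping visible in the hunks above.

```python
# Sketch only: helper name and path are assumptions, not part of this PR.
# It checks the two structures touched above: the run config's top-level
# "apis" list and its "providers" mapping.
import os
import yaml

def assert_post_training_disabled(run_yaml_path: str) -> None:
    with open(os.path.expanduser(run_yaml_path)) as f:
        cfg = yaml.safe_load(f)
    assert "post_training" not in cfg.get("apis", []), "API still enabled"
    assert "post_training" not in cfg.get("providers", {}), "provider block still present"

assert_post_training_disabled("~/.llama/distributions/starter/run.yaml")  # path is a guess
```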
File 3 of 3 (distribution template, Python):

@@ -19,7 +19,6 @@ from llama_stack.providers.inline.files.localfs.config import LocalfsFilesImplCo
 from llama_stack.providers.inline.inference.sentence_transformers import (
     SentenceTransformersInferenceConfig,
 )
-from llama_stack.providers.inline.post_training.huggingface import HuggingFacePostTrainingConfig
 from llama_stack.providers.inline.vector_io.faiss.config import FaissVectorIOConfig
 from llama_stack.providers.inline.vector_io.milvus.config import (
     MilvusVectorIOConfig,
@@ -256,7 +255,6 @@ def get_distribution_template() -> DistributionTemplate:
         "safety": ["inline::llama-guard"],
         "agents": ["inline::meta-reference"],
         "telemetry": ["inline::meta-reference"],
-        "post_training": ["inline::huggingface"],
         "eval": ["inline::meta-reference"],
         "datasetio": ["remote::huggingface", "inline::localfs"],
         "scoring": ["inline::basic", "inline::llm-as-judge", "inline::braintrust"],
@@ -277,11 +275,6 @@ def get_distribution_template() -> DistributionTemplate:
         provider_type="inline::sentence-transformers",
         config=SentenceTransformersInferenceConfig.sample_run_config(),
     )
-    post_training_provider = Provider(
-        provider_id="huggingface",
-        provider_type="inline::huggingface",
-        config=HuggingFacePostTrainingConfig.sample_run_config(f"~/.llama/distributions/{name}"),
-    )
     default_tool_groups = [
         ToolGroupInput(
             toolgroup_id="builtin::websearch",
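A downstream fork of this template that still wants post-training can restore the deleted construction as is. The sketch below simply mirrors the removed lines; it assumes the inline huggingface post_training package still ships HuggingFacePostTrainingConfig, and that `Provider` and the `name` variable are already defined in the surrounding template code.

```python
# Sketch mirroring the deleted lines above, for a fork that keeps post-training.
# Assumes the inline huggingface post_training package is still available;
# Provider and `name` come from the template's existing imports and locals.
from llama_stack.providers.inline.post_training.huggingface import HuggingFacePostTrainingConfig

post_training_provider = Provider(
    provider_id="huggingface",
    provider_type="inline::huggingface",
    config=HuggingFacePostTrainingConfig.sample_run_config(f"~/.llama/distributions/{name}"),
)
```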
@@ -321,7 +314,6 @@ def get_distribution_template() -> DistributionTemplate:
                 "inference": remote_inference_providers + [embedding_provider],
                 "vector_io": vector_io_providers,
                 "files": [files_provider],
-                "post_training": [post_training_provider],
             },
             default_models=default_models + [embedding_model],
             default_tool_groups=default_tool_groups,
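Restoring post-training in a fork also means putting back the other entries removed in this PR: `"post_training": ["inline::huggingface"]` in the template's providers dict (hunk at -256), `"post_training": [post_training_provider]` in the provider overrides (hunk at -321), the `- post_training` entry in the run config's apis list, and the `post_training: [inline::huggingface]` section of the build spec.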