This commit is contained in:
Raghotham Murthy 2025-05-28 22:17:21 -07:00
parent 2d5d05a2b4
commit fa9dd0586a
2 changed files with 4 additions and 11 deletions

View file

@@ -47,7 +47,7 @@ class RedisKVStoreConfig(CommonConfig):
class SqliteKVStoreConfig(CommonConfig):
type: Literal[KVStoreType.sqlite] = KVStoreType.sqlite.value
type: Literal[KVStoreType.sqlite.value] = KVStoreType.sqlite.value
db_path: str = Field(
default=(RUNTIME_BASE_DIR / "kvstore.db").as_posix(),
description="File path for the sqlite database",
@@ -63,7 +63,7 @@ class SqliteKVStoreConfig(CommonConfig):
class PostgresKVStoreConfig(CommonConfig):
type: Literal[KVStoreType.postgres] = KVStoreType.postgres.value
type: Literal[KVStoreType.postgres.value] = KVStoreType.postgres.value
host: str = "localhost"
port: int = 5432
db: str = "llamastack"
@@ -102,7 +102,7 @@ class PostgresKVStoreConfig(CommonConfig):
class MongoDBKVStoreConfig(CommonConfig):
type: Literal[KVStoreType.mongodb] = KVStoreType.mongodb.value
type: Literal[KVStoreType.mongodb.value] = KVStoreType.mongodb.value
host: str = "localhost"
port: int = 27017
db: str = "llamastack"

View file

@@ -28,7 +28,7 @@ def get_distribution_template() -> DistributionTemplate:
"telemetry": ["inline::meta-reference"],
"eval": ["inline::meta-reference"],
"datasetio": ["inline::localfs"],
"scoring": ["inline::basic", "inline::llm-as-judge"],
"scoring": ["inline::llm-as-judge"],
"tool_runtime": [
"remote::brave-search",
"remote::tavily-search",
@@ -42,11 +42,6 @@ def get_distribution_template() -> DistributionTemplate:
provider_type="remote::ollama",
config=OllamaImplConfig.sample_run_config(),
)
#vector_io_provider_faiss = Provider(
# provider_id="faiss",
# provider_type="inline::faiss",
# config=FaissVectorIOConfig.sample_run_config(f"~/.llama/distributions/{name}"),
#)
inference_model = ModelInput(
model_id="${env.INFERENCE_MODEL}",
provider_id="ollama",
@@ -86,7 +81,6 @@ def get_distribution_template() -> DistributionTemplate:
"run.yaml": RunConfigSettings(
provider_overrides={
"inference": [inference_provider],
"vector_io": [vector_io_provider_faiss],
},
default_models=[inference_model, embedding_model],
default_tool_groups=default_tool_groups,
@@ -94,7 +88,6 @@ def get_distribution_template() -> DistributionTemplate:
"run-with-safety.yaml": RunConfigSettings(
provider_overrides={
"inference": [inference_provider],
"vector_io": [vector_io_provider_faiss]
"safety": [
Provider(
provider_id="llama-guard",