Mirror of https://github.com/meta-llama/llama-stack.git, synced 2026-01-04 11:22:15 +00:00
Updated the configuration files to include the preprocessor resource.
Commit: e16bdf138f
Parent: 1a6e71c61f
31 changed files with 52 additions and 2 deletions
@@ -35,6 +35,7 @@ def get_distribution_template() -> DistributionTemplate:
             "inline::rag-runtime",
             "remote::model-context-protocol",
         ],
+        "preprocessing": ["inline::basic", "inline::simple_chunking"],
     }
     name = "ollama"
     inference_provider = Provider(
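For context, here is a minimal sketch of the provider registry in the ollama distribution template after this change. Only the "tool_runtime" tail and the new "preprocessing" entry come from the hunk above; the elided categories and the small self-check are illustrative assumptions, not the actual llama-stack source.

```python
# Hypothetical sketch of the provider mapping after this commit.
# The "tool_runtime" entries and the "preprocessing" line are taken from
# the diff; everything else is an assumption for illustration.
providers: dict[str, list[str]] = {
    # ... other categories (e.g. inference, safety) elided ...
    "tool_runtime": [
        "inline::rag-runtime",
        "remote::model-context-protocol",
    ],
    # Added by this commit: preprocessing providers for the distribution.
    "preprocessing": ["inline::basic", "inline::simple_chunking"],
}

if __name__ == "__main__":
    # Quick sanity check that the new category is registered.
    assert "preprocessing" in providers
    print(providers["preprocessing"])
```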