mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-17 07:41:25 +00:00
Add AWS Bedrock inference provider to Red Hat distribution
- Add bedrock provider configuration in providers.d/remote/inference/
- Include bedrock as inference provider option in build.yaml
- Configure with proper boto3 dependencies and module references
This commit is contained in:
parent
394ab165d1
commit
3ee3746462
2 changed files with 8 additions and 0 deletions
@@ -4,6 +4,7 @@ distribution_spec:
   providers:
     inference:
     - remote::vllm
+    - remote::bedrock
     - inline::sentence-transformers
     vector_io:
     - inline::milvus
@@ -0,0 +1,7 @@
+adapter:
+  adapter_type: bedrock
+  pip_packages: ["boto3>=1.26.137", "botocore"]
+  config_class: llama_stack.providers.remote.inference.bedrock.config.BedrockConfig
+  module: llama_stack.providers.remote.inference.bedrock
+api_dependencies: ["inference"]
+optional_api_dependencies: []
Loading…
Add table
Add a link
Reference in a new issue