Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-17 12:39:48 +00:00
- Add bedrock provider configuration in providers.d/remote/inference/
- Include bedrock as inference provider option in build.yaml
- Configure with proper boto3 dependencies and module references
7 lines
No EOL
283 B
YAML
7 lines
No EOL
283 B
YAML
# Remote inference provider spec for AWS Bedrock.
# Discovered from providers.d/remote/inference/; referenced by build.yaml.
adapter:
  # Identifier used to select this adapter in build/run configs.
  adapter_type: bedrock
  # Runtime deps installed for this provider.
  # NOTE(review): the boto3 lower bound is presumably the first release with
  # Bedrock support — confirm before changing.
  pip_packages: ["boto3>=1.26.137", "botocore"]
  # Pydantic config class loaded for this provider.
  config_class: llama_stack.providers.remote.inference.bedrock.config.BedrockConfig
  # Python module implementing the adapter.
  module: llama_stack.providers.remote.inference.bedrock
  # Llama Stack APIs this provider requires.
  api_dependencies: ["inference"]
  # No optional API dependencies.
  optional_api_dependencies: []