Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-10-15 22:47:59 +00:00.
feat: azure ai inference support
This commit is contained in:
parent
bf4f97a2e1
commit
27a0545f5f
5 changed files with 317 additions and 0 deletions
|
@@ -140,6 +140,15 @@ def available_providers() -> List[ProviderSpec]:
                 config_class="llama_stack.providers.adapters.inference.databricks.DatabricksImplConfig",
             ),
         ),
+        remote_provider_spec(
+            api=Api.inference,
+            adapter=AdapterSpec(
+                adapter_type="azure-ai-inference",
+                pip_packages=["azure-ai-inference", "azure-identity", "aiohttp"],
+                module="llama_stack.providers.adapters.inference.azure_ai_inference",
+                config_class="llama_stack.providers.adapters.inference.azure_ai_inference.AzureAIInferenceConfig",
+            ),
+        ),
         InlineProviderSpec(
             api=Api.inference,
             provider_type="vllm",
|
Loading…
Add table
Add a link
Reference in a new issue