# What does this PR do?

Use `SecretStr` for OpenAIMixin providers:

- `RemoteInferenceProviderConfig` now has `auth_credential: SecretStr`
- the default alias is `api_key` (the most common name)
- some providers override it to use `api_token` (RunPod, vLLM, Databricks)
- some providers exclude it entirely (Ollama, TGI, Vertex AI)

Addresses #3517

## Test Plan

CI w/ new tests
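A minimal sketch of the pattern, assuming pydantic v2 field aliasing; the subclass name and defaults below are illustrative, not taken from the PR:

```python
from pydantic import BaseModel, Field, SecretStr


class RemoteInferenceProviderConfig(BaseModel):
    # SecretStr masks the value in repr()/str(), so credentials don't
    # leak into logs or tracebacks. The alias lets users keep writing
    # the familiar "api_key" field in their configs.
    auth_credential: SecretStr | None = Field(default=None, alias="api_key")


# A provider whose upstream convention is "api_token" overrides the alias
# (illustrative subclass name, not an actual provider config class):
class TokenBasedProviderConfig(RemoteInferenceProviderConfig):
    auth_credential: SecretStr | None = Field(default=None, alias="api_token")


cfg = RemoteInferenceProviderConfig(api_key="sk-secret")
print(cfg.auth_credential)                     # **********
print(cfg.auth_credential.get_secret_value())  # sk-secret
```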
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from urllib.parse import urljoin

from llama_stack.providers.utils.inference.openai_mixin import OpenAIMixin

from .config import AzureConfig


class AzureInferenceAdapter(OpenAIMixin):
    config: AzureConfig

    # Name of the provider-data field that can carry a per-request Azure API key.
    provider_data_api_key_field: str = "azure_api_key"

    def get_base_url(self) -> str:
        """
        Get the Azure API base URL.

        Returns the Azure API base URL from the configuration, with the
        "/openai/v1" path appended.
        """
        return urljoin(str(self.config.api_base), "/openai/v1")
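One subtlety worth noting: because `/openai/v1` is an absolute path, `urljoin` replaces any path component already present on `api_base` rather than appending to it (the hostname below is illustrative):

```python
from urllib.parse import urljoin

urljoin("https://myresource.openai.azure.com/", "/openai/v1")
# -> 'https://myresource.openai.azure.com/openai/v1'

urljoin("https://myresource.openai.azure.com/deployments/foo", "/openai/v1")
# -> 'https://myresource.openai.azure.com/openai/v1'
```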