mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-08-12 13:00:39 +00:00
feat: Add clear error message when API key is missing (#2992)
# What does this PR do? Improve the user experience by providing specific guidance when no API key is available, showing both the provider-data header and the config option with the correct field name for each provider. Also adds comprehensive test coverage for API key resolution scenarios. Addresses #2990 for providers using the LiteLLM OpenAI mixin. ## Test Plan `./scripts/unit-tests.sh tests/unit/providers/inference/test_litellm_openai_mixin.py`
This commit is contained in:
parent
22f79bdb9e
commit
218c89fff1
2 changed files with 118 additions and 0 deletions
|
@ -254,6 +254,12 @@ class LiteLLMOpenAIMixin(
|
|||
api_key = getattr(provider_data, key_field)
|
||||
else:
|
||||
api_key = self.api_key_from_config
|
||||
if not api_key:
|
||||
raise ValueError(
|
||||
"API key is not set. Please provide a valid API key in the "
|
||||
"provider data header, e.g. x-llamastack-provider-data: "
|
||||
f'{{"{key_field}": "<API_KEY>"}}, or in the provider config.'
|
||||
)
|
||||
return api_key
|
||||
|
||||
async def embeddings(
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue