mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-12 13:57:57 +00:00
chore: add provider-data-api-key support to openaimixin (#3639)
# What does this PR do? The LiteLLMOpenAIMixin provides support for reading the API key from provider data (headers that users send). This PR adds the same functionality to the OpenAIMixin, as infrastructure for migrating providers. ## Test Plan CI, with new unit tests.
This commit is contained in:
parent
28bbbcf2c1
commit
4dbe0593f9
5 changed files with 116 additions and 17 deletions
|
@ -19,6 +19,7 @@ class TestOpenAIBaseURLConfig:
|
|||
"""Test that the adapter uses the default OpenAI base URL when no environment variable is set."""
|
||||
config = OpenAIConfig(api_key="test-key")
|
||||
adapter = OpenAIInferenceAdapter(config)
|
||||
adapter.provider_data_api_key_field = None # Disable provider data for this test
|
||||
|
||||
assert adapter.get_base_url() == "https://api.openai.com/v1"
|
||||
|
||||
|
@ -27,6 +28,7 @@ class TestOpenAIBaseURLConfig:
|
|||
custom_url = "https://custom.openai.com/v1"
|
||||
config = OpenAIConfig(api_key="test-key", base_url=custom_url)
|
||||
adapter = OpenAIInferenceAdapter(config)
|
||||
adapter.provider_data_api_key_field = None # Disable provider data for this test
|
||||
|
||||
assert adapter.get_base_url() == custom_url
|
||||
|
||||
|
@ -38,6 +40,7 @@ class TestOpenAIBaseURLConfig:
|
|||
processed_config = replace_env_vars(config_data)
|
||||
config = OpenAIConfig.model_validate(processed_config)
|
||||
adapter = OpenAIInferenceAdapter(config)
|
||||
adapter.provider_data_api_key_field = None # Disable provider data for this test
|
||||
|
||||
assert adapter.get_base_url() == "https://env.openai.com/v1"
|
||||
|
||||
|
@ -47,6 +50,7 @@ class TestOpenAIBaseURLConfig:
|
|||
custom_url = "https://config.openai.com/v1"
|
||||
config = OpenAIConfig(api_key="test-key", base_url=custom_url)
|
||||
adapter = OpenAIInferenceAdapter(config)
|
||||
adapter.provider_data_api_key_field = None # Disable provider data for this test
|
||||
|
||||
# Config should take precedence over environment variable
|
||||
assert adapter.get_base_url() == custom_url
|
||||
|
@ -57,6 +61,7 @@ class TestOpenAIBaseURLConfig:
|
|||
custom_url = "https://test.openai.com/v1"
|
||||
config = OpenAIConfig(api_key="test-key", base_url=custom_url)
|
||||
adapter = OpenAIInferenceAdapter(config)
|
||||
adapter.provider_data_api_key_field = None # Disable provider data for this test
|
||||
|
||||
# Mock the get_api_key method since it's delegated to LiteLLMOpenAIMixin
|
||||
adapter.get_api_key = MagicMock(return_value="test-key")
|
||||
|
@ -76,6 +81,7 @@ class TestOpenAIBaseURLConfig:
|
|||
custom_url = "https://test.openai.com/v1"
|
||||
config = OpenAIConfig(api_key="test-key", base_url=custom_url)
|
||||
adapter = OpenAIInferenceAdapter(config)
|
||||
adapter.provider_data_api_key_field = None # Disable provider data for this test
|
||||
|
||||
# Mock the get_api_key method
|
||||
adapter.get_api_key = MagicMock(return_value="test-key")
|
||||
|
@ -117,6 +123,7 @@ class TestOpenAIBaseURLConfig:
|
|||
processed_config = replace_env_vars(config_data)
|
||||
config = OpenAIConfig.model_validate(processed_config)
|
||||
adapter = OpenAIInferenceAdapter(config)
|
||||
adapter.provider_data_api_key_field = None # Disable provider data for this test
|
||||
|
||||
# Mock the get_api_key method
|
||||
adapter.get_api_key = MagicMock(return_value="test-key")
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue