Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-13 04:22:35 +00:00
Address review comments
Signed-off-by: Bill Murdock <bmurdock@redhat.com>
This commit is contained in: parent ca771cd921, commit 1d941b6aa0
9 changed files with 61 additions and 200 deletions
@@ -16,9 +16,6 @@ from llama_stack.providers.utils.inference.litellm_openai_mixin import LiteLLMOp

class WatsonXInferenceAdapter(LiteLLMOpenAIMixin):
    _config: WatsonXConfig
    __provider_id__: str = "watsonx"

    def __init__(self, config: WatsonXConfig):
        LiteLLMOpenAIMixin.__init__(
            self,
@@ -29,17 +26,9 @@ class WatsonXInferenceAdapter(LiteLLMOpenAIMixin):
        self.available_models = None
        self.config = config

    # get_api_key = LiteLLMOpenAIMixin.get_api_key

    def get_base_url(self) -> str:
        return self.config.url

    async def initialize(self):
        await super().initialize()

    async def shutdown(self):
        await super().shutdown()

    async def _get_params(self, request: ChatCompletionRequest) -> dict[str, Any]:
        # Get base parameters from parent
        params = await super()._get_params(request)
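For context, the adapter shown in this diff follows a common pattern: delegate the generic OpenAI-compatible behavior to LiteLLMOpenAIMixin and override only the provider-specific pieces, such as where the base URL comes from. Below is a minimal standalone sketch of that pattern; the ExampleConfig, ExampleOpenAIMixin, and ExampleAdapter names and fields are illustrative assumptions, not the actual llama-stack classes.

from dataclasses import dataclass


@dataclass
class ExampleConfig:
    # Hypothetical config: only the fields this sketch needs.
    url: str
    api_key: str | None = None


class ExampleOpenAIMixin:
    """Stand-in for a mixin like LiteLLMOpenAIMixin that owns the generic logic."""

    def get_base_url(self) -> str:
        # Subclasses point this at their provider's endpoint.
        raise NotImplementedError

    async def initialize(self) -> None:
        # Generic setup (client creation, model listing, ...) would live here.
        pass

    async def shutdown(self) -> None:
        pass


class ExampleAdapter(ExampleOpenAIMixin):
    def __init__(self, config: ExampleConfig):
        self.config = config

    # The provider-specific piece: return the URL from the adapter's config.
    def get_base_url(self) -> str:
        return self.config.url


if __name__ == "__main__":
    adapter = ExampleAdapter(ExampleConfig(url="https://us-south.ml.cloud.ibm.com"))
    print(adapter.get_base_url())

With this shape, removing redundant initialize/shutdown overrides (as the hunk above suggests) leaves the mixin's defaults in effect, which is why the adapter only needs to keep the methods whose behavior actually differs.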