Mirror of https://github.com/meta-llama/llama-stack.git
Synced 2025-10-04 04:04:14 +00:00
fix: Update Watsonx provider to use LiteLLM mixin and list all models
Signed-off-by: Bill Murdock <bmurdock@redhat.com>
This commit is contained in:
parent
9f6c658f2a
commit
999c28e809
6 changed files with 109 additions and 284 deletions
|
@@ -27,11 +27,11 @@ class WatsonXConfig(RemoteInferenceProviderConfig):
     )
     api_key: SecretStr | None = Field(
         default_factory=lambda: os.getenv("WATSONX_API_KEY"),
-        description="The watsonx API key",
+        description="The watsonx.ai API key",
     )
     project_id: str | None = Field(
         default_factory=lambda: os.getenv("WATSONX_PROJECT_ID"),
-        description="The Project ID key",
+        description="The watsonx.ai project ID",
     )
     timeout: int = Field(
         default=60,
Loading…
Add table
Add a link
Reference in a new issue