Mirror of https://github.com/meta-llama/llama-stack.git
synced 2025-12-12 12:06:04 +00:00
feat: add provider data keys for Cerebras, Databricks, NVIDIA, and RunPod
- added missing tests for Fireworks, Anthropic, Gemini, SambaNova, and vLLM
This commit is contained in:
parent 5d711d4bcb
commit bb95c1a7c5
10 changed files with 125 additions and 8 deletions
@@ -6,12 +6,19 @@
 from typing import Any

-from pydantic import Field
+from pydantic import BaseModel, Field

 from llama_stack.providers.utils.inference.model_registry import RemoteInferenceProviderConfig
 from llama_stack.schema_utils import json_schema_type


+class RunpodProviderDataValidator(BaseModel):
+    runpod_api_token: str | None = Field(
+        default=None,
+        description="API token for RunPod models",
+    )
+
+
 @json_schema_type
 class RunpodImplConfig(RemoteInferenceProviderConfig):
     url: str | None = Field(
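The new RunpodProviderDataValidator means a caller can supply the RunPod key per request through Llama Stack's provider-data header instead of baking it into the server config. A minimal sketch of what such a request could look like, assuming a local server at http://localhost:8321 and the X-LlamaStack-Provider-Data header; the endpoint path and model id below are illustrative placeholders:

# Minimal sketch: supplying the RunPod key per request via provider data.
# Assumes a Llama Stack server at http://localhost:8321; the OpenAI-compatible
# endpoint path and the model id are illustrative placeholders.
import json

import requests

provider_data = {"runpod_api_token": "rp_xxx"}  # key name matches RunpodProviderDataValidator

response = requests.post(
    "http://localhost:8321/v1/openai/v1/chat/completions",
    headers={"X-LlamaStack-Provider-Data": json.dumps(provider_data)},
    json={
        "model": "runpod/my-model",  # illustrative model id
        "messages": [{"role": "user", "content": "Hello"}],
    },
)
print(response.json())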
@@ -24,6 +24,8 @@ class RunpodInferenceAdapter(OpenAIMixin):

     config: RunpodImplConfig

+    provider_data_api_key_field: str = "runpod_api_token"
+
     def get_api_key(self) -> str:
         """Get API key for OpenAI client."""
         return self.config.api_token
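Setting provider_data_api_key_field tells the shared OpenAIMixin which key to read from request-scoped provider data, with get_api_key() (the static config.api_token) as the fallback. A rough sketch of that resolution order, as an illustration of the pattern rather than the mixin's actual code:

# Rough sketch of the expected key-resolution order; an illustration of the
# pattern, not the OpenAIMixin source.
def resolve_api_key(adapter, provider_data: dict | None) -> str:
    field = getattr(adapter, "provider_data_api_key_field", None)  # e.g. "runpod_api_token"
    if provider_data and field and provider_data.get(field):
        return provider_data[field]  # per-request key from the provider-data header
    return adapter.get_api_key()  # falls back to the static config.api_token

With that in place, the same adapter works both for servers configured with a fixed RunPod token and for multi-tenant setups where each request carries its own runpod_api_token.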