chore: make all remote inference provider configs RemoteInferenceProviderConfigs

This commit is contained in:
Matthew Farrellee 2025-10-02 18:13:50 -04:00
parent 4dfbe46954
commit 71d67a983e
37 changed files with 65 additions and 26 deletions

View file

@@ -6,12 +6,14 @@
from typing import Any
from pydantic import BaseModel, Field
from pydantic import Field
from llama_stack.providers.utils.inference.model_registry import RemoteInferenceProviderConfig
DEFAULT_OLLAMA_URL = "http://localhost:11434"
class OllamaImplConfig(BaseModel):
class OllamaImplConfig(RemoteInferenceProviderConfig):
url: str = DEFAULT_OLLAMA_URL
refresh_models: bool = Field(
default=False,