rename all _file.py to file.py

Matthew Farrellee 2024-11-21 15:22:01 -05:00
parent 8944491c3c
commit 1e18791fff
5 changed files with 6 additions and 6 deletions


@@ -6,12 +6,12 @@
 from llama_stack.apis.inference import Inference
-from ._config import NVIDIAConfig
+from .config import NVIDIAConfig
 async def get_adapter_impl(config: NVIDIAConfig, _deps) -> Inference:
     # import dynamically so `llama stack build` does not fail due to missing dependencies
-    from ._nvidia import NVIDIAInferenceAdapter
+    from .nvidia import NVIDIAInferenceAdapter
     if not isinstance(config, NVIDIAConfig):
         raise RuntimeError(f"Unexpected config type: {type(config)}")
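
The deferred import in the hunk above is why `llama stack build` can succeed without the adapter's optional packages installed: the adapter module is only imported when the factory actually runs. Below is a minimal sketch of that pattern, not taken from this commit; ExampleConfig, ExampleAdapter, and example_adapter are made-up names standing in for a provider package's real modules.

# Illustrative sketch of the deferred-import factory pattern.
from dataclasses import dataclass

@dataclass
class ExampleConfig:
    url: str = "http://localhost:8000"

async def get_adapter_impl(config: ExampleConfig, _deps):
    # Importing the adapter here, not at module top level, keeps its optional
    # third-party dependencies out of the package's import-time requirements,
    # so tooling that merely imports the package does not need them installed.
    from .example_adapter import ExampleAdapter  # resolved only when an adapter is built

    if not isinstance(config, ExampleConfig):
        raise RuntimeError(f"Unexpected config type: {type(config)}")
    return ExampleAdapter(config)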


@@ -34,13 +34,13 @@ from llama_stack.providers.utils.inference.model_registry import (
     ModelRegistryHelper,
 )
-from ._config import NVIDIAConfig
-from ._openai_utils import (
+from . import NVIDIAConfig
+from .openai_utils import (
     convert_chat_completion_request,
     convert_openai_chat_completion_choice,
     convert_openai_chat_completion_stream,
 )
-from ._utils import _is_nvidia_hosted, check_health
+from .utils import _is_nvidia_hosted, check_health
 _MODEL_ALIASES = [
     build_model_alias_with_just_provider_model_id(
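
The switch from `from ._config import NVIDIAConfig` to `from . import NVIDIAConfig` works because the package `__init__.py` (first hunk) re-exports the class via `from .config import NVIDIAConfig`, so sibling modules can import it from the package itself. A sketch of that relationship, with the module names inferred from the imports shown rather than copied from the commit:

# __init__.py -- re-exports the config class at package level (per the first hunk)
from .config import NVIDIAConfig  # noqa: F401

# nvidia.py -- the adapter module can now import the re-exported name (this hunk)
from . import NVIDIAConfig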


@@ -8,7 +8,7 @@ from typing import Tuple
 import httpx
-from ._config import NVIDIAConfig
+from . import NVIDIAConfig
 def _is_nvidia_hosted(config: NVIDIAConfig) -> bool:
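
For orientation, `_is_nvidia_hosted` presumably distinguishes NVIDIA's hosted endpoint from a locally deployed service by inspecting the configured URL; only its signature appears in this hunk, so the body below is a hypothetical sketch, not the commit's implementation.

def _is_nvidia_hosted(config: NVIDIAConfig) -> bool:
    # Hypothetical check; the actual utils.py body is not shown in this diff.
    # Assumes NVIDIAConfig exposes the endpoint as `config.url`.
    return "api.nvidia.com" in config.url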