diff --git a/llama_stack/providers/remote/inference/lmstudio/__init__.py b/llama_stack/providers/remote/inference/lmstudio/__init__.py
new file mode 100644
index 000000000..6d2d39b7a
--- /dev/null
+++ b/llama_stack/providers/remote/inference/lmstudio/__init__.py
@@ -0,0 +1,22 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from llama_stack.apis.inference import Inference
+
+from .config import LMSTUDIOConfig
+
+
+async def get_adapter_impl(config: LMSTUDIOConfig, _deps) -> Inference:
+    # import dynamically so `llama stack build` does not fail due to missing dependencies
+    from .lmstudio import LMSTUDIOInferenceAdapter
+
+    if not isinstance(config, LMSTUDIOConfig):
+        raise RuntimeError(f"Unexpected config type: {type(config)}")
+    adapter = LMSTUDIOInferenceAdapter(config)
+    return adapter
+
+
+__all__ = ["get_adapter_impl", "LMSTUDIOConfig"]
diff --git a/llama_stack/providers/remote/inference/lmstudio/config.py b/llama_stack/providers/remote/inference/lmstudio/config.py
new file mode 100644
index 000000000..3e645dfa1
--- /dev/null
+++ b/llama_stack/providers/remote/inference/lmstudio/config.py
@@ -0,0 +1,22 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+from typing import Any, Dict
+
+from pydantic import BaseModel
+
+
+DEFAULT_LMSTUDIO_URL = "http://localhost:1234"
+
+
+class LMSTUDIOConfig(BaseModel):
+    url: str = DEFAULT_LMSTUDIO_URL
+
+    @classmethod
+    def sample_run_config(
+        cls, url: str = "${env.LMSTUDIO_URL:http://localhost:1234}", **kwargs
+    ) -> Dict[str, Any]:
+        return {"url": url}
diff --git a/llama_stack/providers/remote/inference/lmstudio/lmstudio.py b/llama_stack/providers/remote/inference/lmstudio/lmstudio.py
new file mode 100644
index 000000000..8adf25113
--- /dev/null
+++ b/llama_stack/providers/remote/inference/lmstudio/lmstudio.py
@@ -0,0 +1,51 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
+
+from llama_models.sku_list import CoreModelId
+from llama_stack.providers.utils.inference.model_registry import build_model_alias
+
+
+# TODO: make sure it follows the same pattern for lmstudio's model id
+_MODEL_ALIASES = [
+    build_model_alias(
+        "meta/llama3-8b-instruct",
+        CoreModelId.llama3_8b_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama3-70b-instruct",
+        CoreModelId.llama3_70b_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama-3.1-8b-instruct",
+        CoreModelId.llama3_1_8b_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama-3.1-70b-instruct",
+        CoreModelId.llama3_1_70b_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama-3.1-405b-instruct",
+        CoreModelId.llama3_1_405b_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama-3.2-1b-instruct",
+        CoreModelId.llama3_2_1b_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama-3.2-3b-instruct",
+        CoreModelId.llama3_2_3b_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama-3.2-11b-vision-instruct",
+        CoreModelId.llama3_2_11b_vision_instruct.value,
+    ),
+    build_model_alias(
+        "meta/llama-3.2-90b-vision-instruct",
+        CoreModelId.llama3_2_90b_vision_instruct.value,
+    ),
+]
+# TODO: Implement LMSTUDIOInferenceAdapter CLASS