Compare commits

...

2 commits

Author       SHA1        Message                             Date
Justin Lee   a5a573ad76  init lmstudio inference structure   2025-01-31 13:37:59 -08:00
Justin Lee   5d88a2fff5  init lm studio                      2025-01-31 13:24:06 -08:00
4 changed files with 105 additions and 0 deletions


@@ -215,4 +215,15 @@ def available_providers() -> List[ProviderSpec]:
config_class="llama_stack.providers.remote.inference.sambanova.SambaNovaImplConfig",
),
),
remote_provider_spec(
api=Api.inference,
adapter=AdapterSpec(
adapter_type="lmstudio",
pip_packages=[
"openai",
],
module="llama_stack.providers.remote.inference.lmstudio",
config_class="llama_stack.providers.remote.inference.lmstudio.LMStudioImplConfig",
),
),
]
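
Once this entry is in place, the new spec can be looked up from the inference registry like any other remote provider. The snippet below is a small illustrative check, not part of the diff; it assumes the registry module lives at llama_stack.providers.registry.inference and reads only the AdapterSpec fields set above.

# Illustrative only: confirm the lmstudio spec is registered.
from llama_stack.providers.registry.inference import available_providers

spec = next(
    s
    for s in available_providers()
    if getattr(s, "adapter", None) and s.adapter.adapter_type == "lmstudio"
)
print(spec.adapter.module)  # llama_stack.providers.remote.inference.lmstudio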


@@ -0,0 +1,22 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from llama_stack.apis.inference import Inference

from .config import LMStudioImplConfig


async def get_adapter_impl(config: LMStudioImplConfig, _deps) -> Inference:
    # import dynamically so `llama stack build` does not fail due to missing dependencies
    from .lmstudio import LMStudioInferenceAdapter

    if not isinstance(config, LMStudioImplConfig):
        raise RuntimeError(f"Unexpected config type: {type(config)}")
    adapter = LMStudioInferenceAdapter(config)
    return adapter


__all__ = ["get_adapter_impl", "LMStudioImplConfig"]
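
A hypothetical end-to-end call through this entry point, not part of the diff: it assumes the unified LMStudioImplConfig name, that the LMStudioInferenceAdapter class (still a TODO below) exists, and that an LM Studio server is already listening on the configured URL.

import asyncio

from llama_stack.providers.remote.inference.lmstudio import (
    LMStudioImplConfig,
    get_adapter_impl,
)


async def main() -> None:
    config = LMStudioImplConfig(url="http://localhost:12345")
    adapter = await get_adapter_impl(config, {})  # no extra deps needed here
    print(type(adapter).__name__)  # LMStudioInferenceAdapter


asyncio.run(main())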


@@ -0,0 +1:22 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from typing import Any, Dict

from pydantic import BaseModel

DEFAULT_LMSTUDIO_URL = "http://localhost:12345"


class LMStudioImplConfig(BaseModel):
    url: str = DEFAULT_LMSTUDIO_URL

    @classmethod
    def sample_run_config(
        cls, url: str = "${env.LMSTUDIO_URL:http://localhost:12345}", **kwargs
    ) -> Dict[str, Any]:
        return {"url": url}
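
For reference, this is how the config behaves once the class name is unified as LMStudioImplConfig (illustrative usage, not part of the diff); sample_run_config leaves the URL as an environment placeholder that the stack, not pydantic, resolves at run time.

from llama_stack.providers.remote.inference.lmstudio.config import LMStudioImplConfig

cfg = LMStudioImplConfig()  # falls back to DEFAULT_LMSTUDIO_URL
print(cfg.url)  # http://localhost:12345

print(LMStudioImplConfig.sample_run_config())
# {'url': '${env.LMSTUDIO_URL:http://localhost:12345}'}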


@@ -0,0 +1,50 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.

from llama_models.sku_list import CoreModelId

# NOTE: assumes build_model_alias is provided by the shared model_registry utilities.
from llama_stack.providers.utils.inference.model_registry import build_model_alias

# TODO: make sure this follows the same pattern for LM Studio's model ids
_MODEL_ALIASES = [
    build_model_alias(
        "meta/llama3-8b-instruct",
        CoreModelId.llama3_8b_instruct.value,
    ),
    build_model_alias(
        "meta/llama3-70b-instruct",
        CoreModelId.llama3_70b_instruct.value,
    ),
    build_model_alias(
        "meta/llama-3.1-8b-instruct",
        CoreModelId.llama3_1_8b_instruct.value,
    ),
    build_model_alias(
        "meta/llama-3.1-70b-instruct",
        CoreModelId.llama3_1_70b_instruct.value,
    ),
    build_model_alias(
        "meta/llama-3.1-405b-instruct",
        CoreModelId.llama3_1_405b_instruct.value,
    ),
    build_model_alias(
        "meta/llama-3.2-1b-instruct",
        CoreModelId.llama3_2_1b_instruct.value,
    ),
    build_model_alias(
        "meta/llama-3.2-3b-instruct",
        CoreModelId.llama3_2_3b_instruct.value,
    ),
    build_model_alias(
        "meta/llama-3.2-11b-vision-instruct",
        CoreModelId.llama3_2_11b_vision_instruct.value,
    ),
    build_model_alias(
        "meta/llama-3.2-90b-vision-instruct",
        CoreModelId.llama3_2_90b_vision_instruct.value,
    ),
]
# TODO: implement the LMStudioInferenceAdapter class
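
The adapter class itself is still a TODO; the sketch below is one possible shape for it, assuming LM Studio exposes an OpenAI-compatible API under <url>/v1 (which is why openai is in pip_packages) and that ModelRegistryHelper accepts the aliases built above. The keyword name model_aliases and the helper method are assumptions, and simple_completion is a hypothetical convenience method, not part of the llama-stack Inference API.

# Minimal sketch only -- not the final implementation of the TODO above.
from openai import AsyncOpenAI

from llama_stack.providers.utils.inference.model_registry import ModelRegistryHelper

from .config import LMStudioImplConfig


class LMStudioInferenceAdapter(ModelRegistryHelper):
    def __init__(self, config: LMStudioImplConfig) -> None:
        # Register the alias table above (constructor signature assumed).
        ModelRegistryHelper.__init__(self, model_aliases=_MODEL_ALIASES)
        self._config = config
        # LM Studio does not check API keys; any placeholder string works.
        self._client = AsyncOpenAI(base_url=f"{config.url}/v1", api_key="lmstudio")

    async def simple_completion(self, model_id: str, prompt: str) -> str:
        # Hypothetical helper: resolve the registered alias, then issue a
        # plain text completion against the OpenAI-compatible endpoint.
        provider_model_id = self.get_provider_model_id(model_id)
        response = await self._client.completions.create(
            model=provider_model_id,
            prompt=prompt,
            max_tokens=128,
        )
        return response.choices[0].text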