diff --git a/docs/getting_started.md b/docs/getting_started.md
index 32f4d2d15..6c8c902c0 100644
--- a/docs/getting_started.md
+++ b/docs/getting_started.md
@@ -73,7 +73,7 @@ docker run -it -p 5000:5000 -v ~/.llama:/root/.llama --gpus=all llamastack-local
 ```
 
 > [!NOTE]
-> `~/.llama` should be the path containing downloaded weights of Llama models. 
+> `~/.llama` should be the path containing downloaded weights of Llama models.
 
 
 #### Via conda
diff --git a/llama_stack/providers/adapters/inference/databricks/__init__.py b/llama_stack/providers/adapters/inference/databricks/__init__.py
index 097579d25..ca2a0a103 100644
--- a/llama_stack/providers/adapters/inference/databricks/__init__.py
+++ b/llama_stack/providers/adapters/inference/databricks/__init__.py
@@ -7,10 +7,11 @@
 from .config import DatabricksImplConfig
 from .databricks import DatabricksInferenceAdapter
 
+
 async def get_adapter_impl(config: DatabricksImplConfig, _deps):
     assert isinstance(
         config, DatabricksImplConfig
     ), f"Unexpected config type: {type(config)}"
     impl = DatabricksInferenceAdapter(config)
     await impl.initialize()
-    return impl
\ No newline at end of file
+    return impl
diff --git a/llama_stack/providers/adapters/inference/databricks/config.py b/llama_stack/providers/adapters/inference/databricks/config.py
index 927bb474c..bb93a0a72 100644
--- a/llama_stack/providers/adapters/inference/databricks/config.py
+++ b/llama_stack/providers/adapters/inference/databricks/config.py
@@ -19,4 +19,4 @@ class DatabricksImplConfig(BaseModel):
     api_token: str = Field(
         default=None,
         description="The Databricks API token",
-    )
\ No newline at end of file
+    )
diff --git a/llama_stack/providers/impls/vllm/__init__.py b/llama_stack/providers/impls/vllm/__init__.py
index 3d5a81ad9..aa0c4b101 100644
--- a/llama_stack/providers/impls/vllm/__init__.py
+++ b/llama_stack/providers/impls/vllm/__init__.py
@@ -1,3 +1,9 @@
+# Copyright (c) Meta Platforms, Inc. and affiliates.
+# All rights reserved.
+#
+# This source code is licensed under the terms described in the LICENSE file in
+# the root directory of this source tree.
+
 from typing import Any
 
 from .config import VLLMConfig