forked from phoenix-oss/llama-stack-mirror
LiteLLM support in Llama Stack
This commit is contained in:
parent
15dcc4ea5e
commit
9e0c8a82cb
6 changed files with 359 additions and 0 deletions
19
llama_stack/providers/remote/inference/litellm/__init__.py
Normal file
19
llama_stack/providers/remote/inference/litellm/__init__.py
Normal file
|
@ -0,0 +1,19 @@
|
|||
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||
# All rights reserved.
|
||||
#
|
||||
# This source code is licensed under the terms described in the LICENSE file in
|
||||
# the root directory of this source tree.
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
from llama_stack.apis.inference import Inference
|
||||
|
||||
from .config import LitellmConfig
|
||||
|
||||
|
||||
async def get_adapter_impl(config: LitellmConfig, _deps) -> Inference:
    """Build the LiteLLM remote-inference adapter from its provider config.

    Args:
        config: Provider configuration; must be a ``LitellmConfig`` instance.
        _deps: Provider dependency map (unused here; required by the
            provider-registry calling convention).

    Returns:
        A ``LitellmInferenceAdapter`` implementing the ``Inference`` API.

    Raises:
        TypeError: If ``config`` is not a ``LitellmConfig``.
    """
    # Validate with a real exception rather than ``assert``: asserts are
    # stripped under ``python -O``, which would let a wrong config type
    # through silently. Checking before the lazy import also fails fast
    # without paying for the heavy dependency.
    if not isinstance(config, LitellmConfig):
        raise TypeError(f"Unexpected config type: {type(config)}")

    # Import dynamically so the litellm dependency is only loaded when this
    # provider is actually selected.
    from .litellm import LitellmInferenceAdapter

    return LitellmInferenceAdapter(config)
|
Loading…
Add table
Add a link
Reference in a new issue