Add LiteLLM inference-provider support to Llama Stack

This commit is contained in:
Abhishek Kumawat 2025-02-03 06:10:51 -08:00
parent 15dcc4ea5e
commit 9e0c8a82cb
6 changed files with 359 additions and 0 deletions

View file

@ -0,0 +1,19 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
from pydantic import BaseModel
from llama_stack.apis.inference import Inference
from .config import LitellmConfig
async def get_adapter_impl(config: LitellmConfig, _deps) -> Inference:
    """Build and return the LiteLLM inference adapter for the given config.

    Args:
        config: Provider configuration; must be a ``LitellmConfig`` instance.
        _deps: Dependency mapping supplied by the stack loader (unused here).

    Returns:
        An initialized ``LitellmInferenceAdapter`` implementing ``Inference``.

    Raises:
        TypeError: If ``config`` is not a ``LitellmConfig``.
    """
    # Import lazily so the litellm dependency is only loaded when this
    # provider is actually instantiated.
    from .litellm import LitellmInferenceAdapter

    # Explicit check instead of `assert`: asserts are stripped under
    # `python -O`, which would silently skip config validation.
    if not isinstance(config, LitellmConfig):
        raise TypeError(f"Unexpected config type: {type(config)}")
    adapter = LitellmInferenceAdapter(config)
    return adapter