Mirror of https://github.com/meta-llama/llama-stack.git
OpenAI-compatible completions and chats for litellm and together
This adds OpenAI-compatible completions and chat completions support for the native Together provider as well as all providers implemented with litellm.
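For context, the goal is that clients can reach these providers through the standard OpenAI client surface. The sketch below is illustrative only: the base URL, API key, and model id are placeholder assumptions, not values taken from this commit.

# Hypothetical usage sketch; the endpoint URL and model id are assumptions.
from openai import OpenAI

client = OpenAI(
    base_url="http://localhost:8321/v1/openai/v1",  # placeholder llama-stack endpoint
    api_key="none",  # placeholder; auth depends on the deployment
)

response = client.chat.completions.create(
    model="meta-llama/Llama-3.1-8B-Instruct",  # placeholder model id
    messages=[{"role": "user", "content": "Say hello."}],
)
print(response.choices[0].message.content)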
parent 1dbdff1496
commit 00c4493bda
4 changed files with 259 additions and 54 deletions
@@ -1052,6 +1052,11 @@ async def convert_openai_chat_completion_stream(
     )


+async def prepare_openai_completion_params(**params):
+    completion_params = {k: v for k, v in params.items() if v is not None}
+    return completion_params
+
+
+class OpenAICompletionUnsupportedMixin:
+    async def openai_completion(
+        self,
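One of the added helpers, prepare_openai_completion_params, simply filters out keyword arguments whose value is None, so optional OpenAI parameters the caller never set are not forwarded to the underlying litellm or Together call. A small self-contained illustration follows; the helper body is copied from the diff above and the argument values are made up.

import asyncio

# Copied from the diff above so this example runs on its own.
async def prepare_openai_completion_params(**params):
    completion_params = {k: v for k, v in params.items() if v is not None}
    return completion_params

async def _demo():
    # Parameters left as None are dropped rather than forwarded downstream.
    params = await prepare_openai_completion_params(
        model="example-model",  # made-up values, illustration only
        prompt="Hello",
        temperature=None,
        max_tokens=128,
    )
    assert params == {"model": "example-model", "prompt": "Hello", "max_tokens": 128}

asyncio.run(_demo())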