Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-31 03:13:53 +00:00
Use our own pydantic models for OpenAI Server APIs
Importing the models from the OpenAI client library required a top-level dependency on the openai python package, and it was also incompatible with our API generation code due to some quirks in how the OpenAI pydantic models are defined. This change creates our own stubs of those pydantic models so that we have more direct control of our API surface for this OpenAI-compatible API, so that it works with our code generation, and so that the openai python client is not a hard requirement of Llama Stack's API.
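To illustrate the approach, here is a minimal sketch of what such a local stub could look like. The class and field names below are illustrative assumptions; the real definitions live in llama_stack.apis.inference.inference and cover more message roles and content types.

    # Hedged sketch: locally defined pydantic stand-ins for OpenAI's chat
    # message params, so the openai package is not a hard dependency.
    # Field names are illustrative, not the actual Llama Stack definitions.
    from typing import Literal, Union

    from pydantic import BaseModel


    class OpenAISystemMessageParam(BaseModel):
        """A system message in an OpenAI-compatible chat completion request."""

        role: Literal["system"] = "system"
        content: str


    class OpenAIUserMessageParam(BaseModel):
        """A user message in an OpenAI-compatible chat completion request."""

        role: Literal["user"] = "user"
        content: str


    # A union like the OpenAIMessageParam referenced in the diff below; the
    # real union covers additional roles (assistant, tool, etc.).
    OpenAIMessageParam = Union[OpenAISystemMessageParam, OpenAIUserMessageParam]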
parent a193c9fc3f
commit 92fdf6d0c9
8 changed files with 1826 additions and 15 deletions
@@ -10,7 +10,6 @@ from typing import Any, AsyncGenerator, Dict, List, Optional, Union
 import httpx
 from openai import AsyncOpenAI
 from openai.types.chat import ChatCompletion as OpenAIChatCompletion
-from openai.types.chat import ChatCompletionMessageParam as OpenAIChatCompletionMessageParam
 from openai.types.chat.chat_completion_chunk import (
     ChatCompletionChunk as OpenAIChatCompletionChunk,
 )
@@ -48,6 +47,7 @@ from llama_stack.apis.inference import (
     ToolDefinition,
     ToolPromptFormat,
 )
+from llama_stack.apis.inference.inference import OpenAIMessageParam
 from llama_stack.apis.models import Model, ModelType
 from llama_stack.models.llama.datatypes import BuiltinTool, StopReason, ToolCall
 from llama_stack.models.llama.sku_list import all_registered_models
@@ -471,7 +471,7 @@ class VLLMInferenceAdapter(Inference, ModelsProtocolPrivate):
     async def openai_chat_completion(
         self,
         model: str,
-        messages: List[OpenAIChatCompletionMessageParam],
+        messages: List[OpenAIMessageParam],
         frequency_penalty: Optional[float] = None,
         function_call: Optional[Union[str, Dict[str, Any]]] = None,
         functions: Optional[List[Dict[str, Any]]] = None,
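With the updated signature above, callers build messages from Llama Stack's own pydantic models rather than the openai package. A hedged usage sketch follows; the adapter instance and the concrete user-message class name (a variant of the OpenAIMessageParam union) are assumptions for illustration, and constructing a configured VLLMInferenceAdapter is deployment-specific and not shown.

    # Hedged usage sketch for the OpenAI-compatible chat completion call.
    # `adapter` stands in for a configured VLLMInferenceAdapter instance.
    # OpenAIUserMessageParam is assumed to be a user-message member of the
    # OpenAIMessageParam union imported in the diff above.
    from llama_stack.apis.inference.inference import OpenAIUserMessageParam


    async def ask(adapter, model_id: str):
        # Request is typed with Llama Stack's own models, not openai's.
        messages = [OpenAIUserMessageParam(role="user", content="Say hello.")]
        return await adapter.openai_chat_completion(model=model_id, messages=messages)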