feat: add custom_metadata to OpenAIModel to unify /v1/models with /v1/openai/v1/models (#4051)
We need to remove the `/v1/openai/v1` paths shortly. One obstacle: our current `/v1/openai/v1/models` endpoint returns different data than `/v1/models`, and our tests target the latter (Llama Stack-customized) behavior. To reach true OpenAI compatibility, this is step 1: add a `custom_metadata` field to `OpenAIModel` that carries all the extra information we include in the native `/v1/models` response. Consumers can extract it by looking at `__pydantic_extra__` or similar fields.

This PR:
- Adds a `custom_metadata` field to the `OpenAIModel` class in `src/llama_stack/apis/models/models.py`
- Modifies `openai_list_models()` in `src/llama_stack/core/routing_tables/models.py` to populate `custom_metadata`

Next steps:
1. Update the Stainless client to use `/v1/openai/v1/models` instead of `/v1/models`
2. Migrate tests to read from `custom_metadata`
3. Remove the `/v1/openai/v1/` prefix entirely and consolidate to a single `/v1/models` endpoint
parent 2381714904
commit 44096512b5
7 changed files with 218 additions and 61 deletions
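As a consumer-side illustration of the extraction mentioned in the commit message, the sketch below lists models through the `/v1/openai/v1/models` route with the official `openai` Python client and pulls `custom_metadata` out of each entry. The base URL, port, and dummy API key are assumptions for a locally running Llama Stack server, and whether the field arrives as a typed attribute or only as a Pydantic extra depends on the client's generated types.

# Hypothetical consumer-side sketch; endpoint and credentials are assumptions.
from openai import OpenAI

client = OpenAI(base_url="http://localhost:8321/v1/openai/v1", api_key="none")

for model in client.models.list():
    # custom_metadata is not part of the standard OpenAI Model type, so on a
    # generated client it may only be visible via Pydantic's extra-field storage.
    extras = model.model_extra or {}
    custom = getattr(model, "custom_metadata", None) or extras.get("custom_metadata")
    print(model.id, custom)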
src/llama_stack/apis/models/models.py

@@ -90,12 +90,14 @@ class OpenAIModel(BaseModel):
     :object: The object type, which will be "model"
     :created: The Unix timestamp in seconds when the model was created
     :owned_by: The owner of the model
+    :custom_metadata: Llama Stack-specific metadata including model_type, provider info, and additional metadata
     """

     id: str
     object: Literal["model"] = "model"
     created: int
     owned_by: str
+    custom_metadata: dict[str, Any] | None = None


 class OpenAIListModelsResponse(BaseModel):
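To show what the amended schema produces on the wire, here is a self-contained sketch that re-declares a minimal equivalent of `OpenAIModel` (rather than importing the real class) and serializes one instance; the example id and metadata values are made up.

import time
from typing import Any, Literal

from pydantic import BaseModel


# Minimal stand-in mirroring the fields in the hunk above; the real class
# lives in src/llama_stack/apis/models/models.py.
class OpenAIModel(BaseModel):
    id: str
    object: Literal["model"] = "model"
    created: int
    owned_by: str
    custom_metadata: dict[str, Any] | None = None


example = OpenAIModel(
    id="llama3.2-3b-instruct",  # made-up identifier
    created=int(time.time()),
    owned_by="llama_stack",
    custom_metadata={"model_type": "llm", "provider_id": "ollama"},
)
print(example.model_dump_json(indent=2))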
@@ -113,7 +115,7 @@ class Models(Protocol):
         """
         ...

-    @webmethod(route="/openai/v1/models", method="GET", level=LLAMA_STACK_API_V1, deprecated=True)
+    @webmethod(route="/openai/v1/models", method="GET", level=LLAMA_STACK_API_V1)
     async def openai_list_models(self) -> OpenAIListModelsResponse:
         """List models using the OpenAI API.

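Since the decorator above keeps the OpenAI-compatible listing at `/v1/openai/v1/models`, the route can also be exercised without any client library. The following is a minimal sketch assuming a locally running Llama Stack server on the default port and no authentication; both are assumptions, not part of this change.

# Hypothetical raw-HTTP check of the models route.
import httpx

resp = httpx.get("http://localhost:8321/v1/openai/v1/models")
resp.raise_for_status()

for entry in resp.json()["data"]:
    # Standard OpenAI fields plus the new Llama Stack-specific block.
    print(entry["id"], entry["owned_by"], entry.get("custom_metadata"))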
src/llama_stack/core/routing_tables/models.py

@@ -134,6 +134,12 @@ class ModelsRoutingTable(CommonRoutingTableImpl, Models):
                 object="model",
                 created=int(time.time()),
                 owned_by="llama_stack",
+                custom_metadata={
+                    "model_type": model.model_type,
+                    "provider_id": model.provider_id,
+                    "provider_resource_id": model.provider_resource_id,
+                    **model.metadata,
+                },
             )
             for model in all_models
         ]
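One detail worth noting in the block above: because `**model.metadata` is expanded last, keys from the model's own metadata win if they collide with the provider fields. A tiny standalone illustration with invented values (not the routing-table code itself):

# Invented values purely to show the merge order used above.
metadata = {"context_length": 8192, "provider_id": "override-example"}
custom_metadata = {
    "model_type": "llm",
    "provider_id": "ollama",
    "provider_resource_id": "llama3.2:3b",
    **metadata,  # expanded last, so "provider_id" becomes "override-example"
}
print(custom_metadata)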