# What does this PR do?

This PR updates the OpenAI-compatible completions surface of the generated OpenAPI specs: the flat request schema is renamed from `OpenaiCompletionRequest` to `OpenAICompletionRequest`, a new `OpenaiCompletionRequest` wrapper schema with a single required `params` field referencing it is added, and descriptions are added for the chat-completion and completion request schemas as well as the vLLM-specific `guided_choice` and `prompt_logprobs` parameters. It also relaxes the docstring validator in the OpenAPI generator so that a method whose only non-`self` parameter is a Pydantic `BaseModel` is exempt from the `:param` check.

## Test Plan
Eric Huang 2025-10-09 20:53:19 -07:00
parent f50ce11a3b
commit 4a3d1e33f8
31 changed files with 727 additions and 892 deletions

@@ -11,6 +11,7 @@ from pathlib import Path
from typing import TextIO
from typing import Any, List, Optional, Union, get_type_hints, get_origin, get_args
from pydantic import BaseModel
from llama_stack.strong_typing.schema import object_to_json, StrictJsonType
from llama_stack.core.resolver import api_protocol_map
@@ -205,6 +206,14 @@ def _validate_has_return_in_docstring(method) -> str | None:
def _validate_has_params_in_docstring(method) -> str | None:
    source = inspect.getsource(method)
    sig = inspect.signature(method)
    params_list = [p for p in sig.parameters.values() if p.name != "self"]
    if len(params_list) == 1:
        param = params_list[0]
        param_type = param.annotation
        if issubclass(param_type, BaseModel):
            return
    # Only check if the method has more than one parameter
    if len(sig.parameters) > 1 and ":param" not in source:
        return "does not have a ':param' in its docstring"

@@ -7716,7 +7716,8 @@
"model",
"messages"
],
"title": "OpenaiChatCompletionRequest"
"title": "OpenaiChatCompletionRequest",
"description": "Request parameters for OpenAI-compatible chat completion endpoint."
},
"OpenAIChatCompletion": {
"type": "object",
@@ -7900,7 +7901,7 @@
],
"title": "OpenAICompletionWithInputMessages"
},
"OpenaiCompletionRequest": {
"OpenAICompletionRequest": {
"type": "object",
"properties": {
"model": {
@@ -8031,18 +8032,20 @@
"type": "string",
"description": "(Optional) The user to use."
},
"suffix": {
"type": "string",
"description": "(Optional) The suffix that should be appended to the completion."
},
"guided_choice": {
"type": "array",
"items": {
"type": "string"
}
},
"description": "(Optional) vLLM-specific parameter for guided generation with a list of choices."
},
"prompt_logprobs": {
"type": "integer"
},
"suffix": {
"type": "string",
"description": "(Optional) The suffix that should be appended to the completion."
"type": "integer",
"description": "(Optional) vLLM-specific parameter for number of log probabilities to return for prompt tokens."
}
},
"additionalProperties": false,
@@ -8050,6 +8053,20 @@
"model",
"prompt"
],
"title": "OpenAICompletionRequest",
"description": "Request parameters for OpenAI-compatible completion endpoint."
},
"OpenaiCompletionRequest": {
"type": "object",
"properties": {
"params": {
"$ref": "#/components/schemas/OpenAICompletionRequest"
}
},
"additionalProperties": false,
"required": [
"params"
],
"title": "OpenaiCompletionRequest"
},
"OpenAICompletion": {

@@ -5671,6 +5671,8 @@ components:
- model
- messages
title: OpenaiChatCompletionRequest
description: >-
Request parameters for OpenAI-compatible chat completion endpoint.
OpenAIChatCompletion:
type: object
properties:
@@ -5824,7 +5826,7 @@
- model
- input_messages
title: OpenAICompletionWithInputMessages
OpenaiCompletionRequest:
OpenAICompletionRequest:
type: object
properties:
model:
@@ -5912,20 +5914,37 @@
user:
type: string
description: (Optional) The user to use.
guided_choice:
type: array
items:
type: string
prompt_logprobs:
type: integer
suffix:
type: string
description: >-
(Optional) The suffix that should be appended to the completion.
guided_choice:
type: array
items:
type: string
description: >-
(Optional) vLLM-specific parameter for guided generation with a list of
choices.
prompt_logprobs:
type: integer
description: >-
(Optional) vLLM-specific parameter for number of log probabilities to
return for prompt tokens.
additionalProperties: false
required:
- model
- prompt
title: OpenAICompletionRequest
description: >-
Request parameters for OpenAI-compatible completion endpoint.
OpenaiCompletionRequest:
type: object
properties:
params:
$ref: '#/components/schemas/OpenAICompletionRequest'
additionalProperties: false
required:
- params
title: OpenaiCompletionRequest
OpenAICompletion:
type: object
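Read as a request body, the wrapper means completion parameters travel under a single `params` key. A hedged example of what a call could look like against a locally running stack (the port, endpoint path, model ID, and exact wire format are assumptions drawn from the schema above, not verified against the server):

```python
import httpx

BASE_URL = "http://localhost:8321"  # assumed default llama-stack port

body = {
    "params": {
        "model": "meta-llama/Llama-3.1-8B-Instruct",  # any model registered with the stack
        "prompt": "Say hello in one word.",
        "max_tokens": 8,
        # vLLM-specific extras documented in the regenerated spec:
        "guided_choice": ["hello", "hi"],
        "prompt_logprobs": 1,
    }
}

# Assumed OpenAI-compatible completions path; adjust to your deployment.
resp = httpx.post(f"{BASE_URL}/v1/openai/v1/completions", json=body, timeout=60)
resp.raise_for_status()
print(resp.json()["choices"][0]["text"])
```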

@@ -5212,7 +5212,8 @@
"model",
"messages"
],
"title": "OpenaiChatCompletionRequest"
"title": "OpenaiChatCompletionRequest",
"description": "Request parameters for OpenAI-compatible chat completion endpoint."
},
"OpenAIChatCompletion": {
"type": "object",
@@ -5396,7 +5397,7 @@
],
"title": "OpenAICompletionWithInputMessages"
},
"OpenaiCompletionRequest": {
"OpenAICompletionRequest": {
"type": "object",
"properties": {
"model": {
@@ -5527,18 +5528,20 @@
"type": "string",
"description": "(Optional) The user to use."
},
"suffix": {
"type": "string",
"description": "(Optional) The suffix that should be appended to the completion."
},
"guided_choice": {
"type": "array",
"items": {
"type": "string"
}
},
"description": "(Optional) vLLM-specific parameter for guided generation with a list of choices."
},
"prompt_logprobs": {
"type": "integer"
},
"suffix": {
"type": "string",
"description": "(Optional) The suffix that should be appended to the completion."
"type": "integer",
"description": "(Optional) vLLM-specific parameter for number of log probabilities to return for prompt tokens."
}
},
"additionalProperties": false,
@@ -5546,6 +5549,20 @@
"model",
"prompt"
],
"title": "OpenAICompletionRequest",
"description": "Request parameters for OpenAI-compatible completion endpoint."
},
"OpenaiCompletionRequest": {
"type": "object",
"properties": {
"params": {
"$ref": "#/components/schemas/OpenAICompletionRequest"
}
},
"additionalProperties": false,
"required": [
"params"
],
"title": "OpenaiCompletionRequest"
},
"OpenAICompletion": {

@@ -3920,6 +3920,8 @@ components:
- model
- messages
title: OpenaiChatCompletionRequest
description: >-
Request parameters for OpenAI-compatible chat completion endpoint.
OpenAIChatCompletion:
type: object
properties:
@@ -4073,7 +4075,7 @@
- model
- input_messages
title: OpenAICompletionWithInputMessages
OpenaiCompletionRequest:
OpenAICompletionRequest:
type: object
properties:
model:
@@ -4161,20 +4163,37 @@
user:
type: string
description: (Optional) The user to use.
guided_choice:
type: array
items:
type: string
prompt_logprobs:
type: integer
suffix:
type: string
description: >-
(Optional) The suffix that should be appended to the completion.
guided_choice:
type: array
items:
type: string
description: >-
(Optional) vLLM-specific parameter for guided generation with a list of
choices.
prompt_logprobs:
type: integer
description: >-
(Optional) vLLM-specific parameter for number of log probabilities to
return for prompt tokens.
additionalProperties: false
required:
- model
- prompt
title: OpenAICompletionRequest
description: >-
Request parameters for OpenAI-compatible completion endpoint.
OpenaiCompletionRequest:
type: object
properties:
params:
$ref: '#/components/schemas/OpenAICompletionRequest'
additionalProperties: false
required:
- params
title: OpenaiCompletionRequest
OpenAICompletion:
type: object

@@ -7221,7 +7221,8 @@
"model",
"messages"
],
"title": "OpenaiChatCompletionRequest"
"title": "OpenaiChatCompletionRequest",
"description": "Request parameters for OpenAI-compatible chat completion endpoint."
},
"OpenAIChatCompletion": {
"type": "object",
@@ -7405,7 +7406,7 @@
],
"title": "OpenAICompletionWithInputMessages"
},
"OpenaiCompletionRequest": {
"OpenAICompletionRequest": {
"type": "object",
"properties": {
"model": {
@@ -7536,18 +7537,20 @@
"type": "string",
"description": "(Optional) The user to use."
},
"suffix": {
"type": "string",
"description": "(Optional) The suffix that should be appended to the completion."
},
"guided_choice": {
"type": "array",
"items": {
"type": "string"
}
},
"description": "(Optional) vLLM-specific parameter for guided generation with a list of choices."
},
"prompt_logprobs": {
"type": "integer"
},
"suffix": {
"type": "string",
"description": "(Optional) The suffix that should be appended to the completion."
"type": "integer",
"description": "(Optional) vLLM-specific parameter for number of log probabilities to return for prompt tokens."
}
},
"additionalProperties": false,
@@ -7555,6 +7558,20 @@
"model",
"prompt"
],
"title": "OpenAICompletionRequest",
"description": "Request parameters for OpenAI-compatible completion endpoint."
},
"OpenaiCompletionRequest": {
"type": "object",
"properties": {
"params": {
"$ref": "#/components/schemas/OpenAICompletionRequest"
}
},
"additionalProperties": false,
"required": [
"params"
],
"title": "OpenaiCompletionRequest"
},
"OpenAICompletion": {

@@ -5365,6 +5365,8 @@ components:
- model
- messages
title: OpenaiChatCompletionRequest
description: >-
Request parameters for OpenAI-compatible chat completion endpoint.
OpenAIChatCompletion:
type: object
properties:
@@ -5518,7 +5520,7 @@
- model
- input_messages
title: OpenAICompletionWithInputMessages
OpenaiCompletionRequest:
OpenAICompletionRequest:
type: object
properties:
model:
@@ -5606,20 +5608,37 @@
user:
type: string
description: (Optional) The user to use.
guided_choice:
type: array
items:
type: string
prompt_logprobs:
type: integer
suffix:
type: string
description: >-
(Optional) The suffix that should be appended to the completion.
guided_choice:
type: array
items:
type: string
description: >-
(Optional) vLLM-specific parameter for guided generation with a list of
choices.
prompt_logprobs:
type: integer
description: >-
(Optional) vLLM-specific parameter for number of log probabilities to
return for prompt tokens.
additionalProperties: false
required:
- model
- prompt
title: OpenAICompletionRequest
description: >-
Request parameters for OpenAI-compatible completion endpoint.
OpenaiCompletionRequest:
type: object
properties:
params:
$ref: '#/components/schemas/OpenAICompletionRequest'
additionalProperties: false
required:
- params
title: OpenaiCompletionRequest
OpenAICompletion:
type: object
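Since the same rename ripples through several generated spec files, a quick sanity check that the new wrapper actually resolves can be useful after regeneration. A small sketch (the spec path is an assumption; point it at whichever regenerated JSON file you want to check):

```python
import json

SPEC_PATH = "docs/static/llama-stack-spec.json"  # assumed location of a regenerated spec

with open(SPEC_PATH) as f:
    spec = json.load(f)

schemas = spec["components"]["schemas"]
wrapper = schemas["OpenaiCompletionRequest"]

# The wrapper should require exactly one property, "params", whose $ref
# points at the flat OpenAICompletionRequest schema defined alongside it.
assert wrapper["required"] == ["params"]
ref = wrapper["properties"]["params"]["$ref"]
target = ref.rsplit("/", 1)[-1]
assert target == "OpenAICompletionRequest" and target in schemas

print(f"{ref} resolves to a schema titled {schemas[target].get('title')!r}")
```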