Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-12 12:06:04 +00:00)

Commit 55ca8a5c50 (parent cc4009603b): fixes

6 changed files with 49 additions and 60 deletions
docs/static/llama-stack-spec.html (vendored), 27 lines changed
@@ -496,7 +496,18 @@
            "schema": {
              "type": "array",
              "items": {
-               "$ref": "#/components/schemas/ConversationItemInclude"
+               "type": "string",
+               "enum": [
+                 "web_search_call.action.sources",
+                 "code_interpreter_call.outputs",
+                 "computer_call_output.output.image_url",
+                 "file_search_call.results",
+                 "message.input_image.image_url",
+                 "message.output_text.logprobs",
+                 "reasoning.encrypted_content"
+               ],
+               "title": "ConversationItemInclude",
+               "description": "Specify additional output data to include in the model response."
              }
            }
          },

@@ -6311,20 +6322,6 @@
        "title": "ConversationDeletedResource",
        "description": "Response for deleted conversation."
      },
-     "ConversationItemInclude": {
-       "type": "string",
-       "enum": [
-         "web_search_call.action.sources",
-         "code_interpreter_call.outputs",
-         "computer_call_output.output.image_url",
-         "file_search_call.results",
-         "message.input_image.image_url",
-         "message.output_text.logprobs",
-         "reasoning.encrypted_content"
-       ],
-       "title": "ConversationItemInclude",
-       "description": "Specify additional output data to include in the model response."
-     },
      "ConversationItemList": {
        "type": "object",
        "properties": {
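The two hunks above are two sides of the same edit: the include query parameter goes from a $ref to the ConversationItemInclude component to the same enum written inline, and the now-unused component schema is removed, so the set of accepted values is unchanged. A minimal sketch, assuming the third-party jsonschema package is available, of validating an include value against the inlined schema (title/description annotations omitted since they do not affect validation):

# Sketch only; the schema dict is copied from the inlined parameter schema in the diff above.
import jsonschema

include_param_schema = {
    "type": "array",
    "items": {
        "type": "string",
        "enum": [
            "web_search_call.action.sources",
            "code_interpreter_call.outputs",
            "computer_call_output.output.image_url",
            "file_search_call.results",
            "message.input_image.image_url",
            "message.output_text.logprobs",
            "reasoning.encrypted_content",
        ],
    },
}

# Passes silently; an unknown value such as ["not.a.valid.value"] would raise ValidationError.
jsonschema.validate(["message.output_text.logprobs"], include_param_schema)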
docs/static/llama-stack-spec.yaml (vendored), 26 lines changed
@@ -358,7 +358,18 @@ paths:
          schema:
            type: array
            items:
-             $ref: '#/components/schemas/ConversationItemInclude'
+             type: string
+             enum:
+               - web_search_call.action.sources
+               - code_interpreter_call.outputs
+               - computer_call_output.output.image_url
+               - file_search_call.results
+               - message.input_image.image_url
+               - message.output_text.logprobs
+               - reasoning.encrypted_content
+             title: ConversationItemInclude
+             description: >-
+               Specify additional output data to include in the model response.
      - name: limit
        in: query
        description: >-

@@ -4738,19 +4749,6 @@ components:
          - deleted
        title: ConversationDeletedResource
        description: Response for deleted conversation.
-    ConversationItemInclude:
-      type: string
-      enum:
-        - web_search_call.action.sources
-        - code_interpreter_call.outputs
-        - computer_call_output.output.image_url
-        - file_search_call.results
-        - message.input_image.image_url
-        - message.output_text.logprobs
-        - reasoning.encrypted_content
-      title: ConversationItemInclude
-      description: >-
-        Specify additional output data to include in the model response.
    ConversationItemList:
      type: object
      properties:
docs/static/stainless-llama-stack-spec.html (vendored), 27 lines changed
@@ -496,7 +496,18 @@
            "schema": {
              "type": "array",
              "items": {
-               "$ref": "#/components/schemas/ConversationItemInclude"
+               "type": "string",
+               "enum": [
+                 "web_search_call.action.sources",
+                 "code_interpreter_call.outputs",
+                 "computer_call_output.output.image_url",
+                 "file_search_call.results",
+                 "message.input_image.image_url",
+                 "message.output_text.logprobs",
+                 "reasoning.encrypted_content"
+               ],
+               "title": "ConversationItemInclude",
+               "description": "Specify additional output data to include in the model response."
              }
            }
          },

@@ -7983,20 +7994,6 @@
        "title": "ConversationDeletedResource",
        "description": "Response for deleted conversation."
      },
-     "ConversationItemInclude": {
-       "type": "string",
-       "enum": [
-         "web_search_call.action.sources",
-         "code_interpreter_call.outputs",
-         "computer_call_output.output.image_url",
-         "file_search_call.results",
-         "message.input_image.image_url",
-         "message.output_text.logprobs",
-         "reasoning.encrypted_content"
-       ],
-       "title": "ConversationItemInclude",
-       "description": "Specify additional output data to include in the model response."
-     },
      "ConversationItemList": {
        "type": "object",
        "properties": {
docs/static/stainless-llama-stack-spec.yaml (vendored), 26 lines changed
@@ -361,7 +361,18 @@ paths:
          schema:
            type: array
            items:
-             $ref: '#/components/schemas/ConversationItemInclude'
+             type: string
+             enum:
+               - web_search_call.action.sources
+               - code_interpreter_call.outputs
+               - computer_call_output.output.image_url
+               - file_search_call.results
+               - message.input_image.image_url
+               - message.output_text.logprobs
+               - reasoning.encrypted_content
+             title: ConversationItemInclude
+             description: >-
+               Specify additional output data to include in the model response.
      - name: limit
        in: query
        description: >-

@@ -5951,19 +5962,6 @@ components:
          - deleted
        title: ConversationDeletedResource
        description: Response for deleted conversation.
-    ConversationItemInclude:
-      type: string
-      enum:
-        - web_search_call.action.sources
-        - code_interpreter_call.outputs
-        - computer_call_output.output.image_url
-        - file_search_call.results
-        - message.input_image.image_url
-        - message.output_text.logprobs
-        - reasoning.encrypted_content
-      title: ConversationItemInclude
-      description: >-
-        Specify additional output data to include in the model response.
    ConversationItemList:
      type: object
      properties:
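All four vendored specs carry the same change: the include query parameter now declares the enum inline. A minimal sketch of how a caller might serialize repeated include values for such an array-of-enum query parameter; the endpoint path in the snippet is hypothetical and not taken from the spec excerpts above, only the enum values are.

# Sketch only: builds a query string with repeated "include" keys, the usual
# serialization for an array query parameter (form style, explode=true).
from urllib.parse import urlencode

include = ["message.output_text.logprobs", "reasoning.encrypted_content"]
query = urlencode({"include": include, "limit": 10}, doseq=True)
# Hypothetical path, for illustration only:
print(f"/v1/conversations/{{conversation_id}}/items?{query}")
# -> /v1/conversations/{conversation_id}/items?include=message.output_text.logprobs&include=reasoning.encrypted_content&limit=10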
@@ -148,7 +148,6 @@ class ConversationItemCreateRequest(BaseModel):
     )
 
 
-@json_schema_type
 class ConversationItemInclude(StrEnum):
     """
     Specify additional output data to include in the model response.
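For reference, a minimal sketch of the enum this hunk touches. The member names below are assumptions, not the actual source; the string values are the ones that appear in the regenerated specs above. Dropping @json_schema_type presumably stops the spec generator from emitting a standalone ConversationItemInclude component, which would explain why the generated specs now inline the enum at the query parameter instead of $ref-ing it.

# Sketch only: member names are assumed, values are taken from the spec diff.
from enum import StrEnum


class ConversationItemInclude(StrEnum):
    """Specify additional output data to include in the model response."""

    web_search_call_action_sources = "web_search_call.action.sources"
    code_interpreter_call_outputs = "code_interpreter_call.outputs"
    computer_call_output_output_image_url = "computer_call_output.output.image_url"
    file_search_call_results = "file_search_call.results"
    message_input_image_image_url = "message.input_image.image_url"
    message_output_text_logprobs = "message.output_text.logprobs"
    reasoning_encrypted_content = "reasoning.encrypted_content"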
@@ -131,7 +131,7 @@ class OpenAIResponsesImpl:
                 tool_context.recover_tools_from_previous_response(previous_response)
         elif conversation is not None:
-            conversation_items = await self.conversations_api.list(conversation, order="asc")
+            conversation_items = await self.conversations_api.list_items(conversation, order="asc")
 
             # Use stored messages as source of truth (like previous_response.messages)
             stored_messages = await self.responses_store.get_conversation_messages(conversation)