diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html
index 1de0dcac0..afa8a2049 100644
--- a/docs/_static/llama-stack-spec.html
+++ b/docs/_static/llama-stack-spec.html
@@ -3047,6 +3047,49 @@
]
}
},
+ "/v1/prompts/{prompt_id}/versions": {
+ "get": {
+ "responses": {
+ "200": {
+ "description": "A ListPromptsResponse containing all versions of the prompt.",
+ "content": {
+ "application/json": {
+ "schema": {
+ "$ref": "#/components/schemas/ListPromptsResponse"
+ }
+ }
+ }
+ },
+ "400": {
+ "$ref": "#/components/responses/BadRequest400"
+ },
+ "429": {
+ "$ref": "#/components/responses/TooManyRequests429"
+ },
+ "500": {
+ "$ref": "#/components/responses/InternalServerError500"
+ },
+ "default": {
+ "$ref": "#/components/responses/DefaultError"
+ }
+ },
+ "tags": [
+ "Prompts"
+ ],
+ "description": "List all versions of a specific prompt.",
+ "parameters": [
+ {
+ "name": "prompt_id",
+ "in": "path",
+ "description": "The identifier of the prompt to list versions for.",
+ "required": true,
+ "schema": {
+ "type": "string"
+ }
+ }
+ ]
+ }
+ },
"/v1/providers": {
"get": {
"responses": {
@@ -9976,12 +10019,18 @@
"type": "string"
},
"description": "Dictionary of prompt variable names and values"
+ },
+ "is_default": {
+ "type": "boolean",
+ "default": false,
+ "description": "Boolean indicating whether this version is the default version for this prompt"
}
},
"additionalProperties": false,
"required": [
"version",
- "prompt_id"
+ "prompt_id",
+ "is_default"
],
"title": "Prompt",
"description": "A prompt resource representing a stored OpenAI Compatible prompt template in Llama Stack."
diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml
index 567933f94..23b76c05b 100644
--- a/docs/_static/llama-stack-spec.yaml
+++ b/docs/_static/llama-stack-spec.yaml
@@ -2132,6 +2132,37 @@ paths:
required: false
schema:
$ref: '#/components/schemas/Order'
+ /v1/prompts/{prompt_id}/versions:
+ get:
+ responses:
+ '200':
+ description: >-
+ A ListPromptsResponse containing all versions of the prompt.
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/ListPromptsResponse'
+ '400':
+ $ref: '#/components/responses/BadRequest400'
+ '429':
+ $ref: >-
+ #/components/responses/TooManyRequests429
+ '500':
+ $ref: >-
+ #/components/responses/InternalServerError500
+ default:
+ $ref: '#/components/responses/DefaultError'
+ tags:
+ - Prompts
+ description: List all versions of a specific prompt.
+ parameters:
+ - name: prompt_id
+ in: path
+ description: >-
+ The identifier of the prompt to list versions for.
+ required: true
+ schema:
+ type: string
/v1/providers:
get:
responses:
@@ -7373,10 +7404,17 @@ components:
type: string
description: >-
Dictionary of prompt variable names and values
+ is_default:
+ type: boolean
+ default: false
+ description: >-
+ Boolean indicating whether this version is the default version for this
+ prompt
additionalProperties: false
required:
- version
- prompt_id
+ - is_default
title: Prompt
description: >-
A prompt resource representing a stored OpenAI Compatible prompt template
diff --git a/llama_stack/apis/prompts/prompts.py b/llama_stack/apis/prompts/prompts.py
index 84cf74b40..37f8334ad 100644
--- a/llama_stack/apis/prompts/prompts.py
+++ b/llama_stack/apis/prompts/prompts.py
@@ -21,6 +21,7 @@ class Prompt(BaseModel):
:param version: Version string (integer starting at 1, cast as a string; incremented on save)
:param prompt_id: Unique identifier formatted as 'pmpt_<48-digit-hash>'
:param variables: Dictionary of prompt variable names and values
+ :param is_default: Boolean indicating whether this version is the default version for this prompt
"""
prompt: str | None = Field(default=None, description="The system prompt with variable placeholders")
@@ -29,6 +30,9 @@ class Prompt(BaseModel):
variables: dict[str, str] | None = Field(
default_factory=dict, description="Variables for dynamic injection using {{variable}} syntax"
)
+ is_default: bool = Field(
+ default=False, description="Boolean indicating whether this version is the default version"
+ )
@field_validator("prompt_id")
@classmethod
@@ -158,6 +162,18 @@ class Prompts(Protocol):
"""
...
+ @webmethod(route="/prompts/{prompt_id:path}/versions", method="GET")
+ async def list_prompt_versions(
+ self,
+ prompt_id: str,
+ ) -> ListPromptsResponse:
+ """List all versions of a specific prompt.
+
+ :param prompt_id: The identifier of the prompt to list versions for.
+ :returns: A ListPromptsResponse containing all versions of the prompt.
+ """
+ ...
+
@webmethod(route="/prompts/{prompt_id:path}/default-version", method="PUT")
async def set_default_version(
self,
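
For callers that hold a Prompts provider in process (rather than going over HTTP), the new method is just another awaitable on the protocol. A minimal sketch, assuming `prompts_impl` is any object implementing this Prompts protocol; the name is illustrative:

    async def show_versions(prompts_impl, prompt_id: str) -> None:
        # list_prompt_versions returns a ListPromptsResponse whose data field
        # holds one Prompt per stored version, oldest first.
        response = await prompts_impl.list_prompt_versions(prompt_id)
        for prompt in response.data:
            marker = " (default)" if prompt.is_default else ""
            print(f"version {prompt.version}{marker}: {prompt.prompt}")
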
diff --git a/llama_stack/core/prompts/prompts.py b/llama_stack/core/prompts/prompts.py
index be25b4928..31e8bfcc9 100644
--- a/llama_stack/core/prompts/prompts.py
+++ b/llama_stack/core/prompts/prompts.py
@@ -169,6 +169,32 @@ class PromptServiceImpl(Prompts):
for key in keys:
await self.kvstore.delete(key)
+ async def list_prompt_versions(self, prompt_id: str) -> ListPromptsResponse:
+ """List all versions of a specific prompt."""
+ prefix = f"prompts:v1:{prompt_id}:"
+ keys = await self.kvstore.keys_in_range(prefix, prefix + "\xff")
+
+ default_version = None
+ prompts = []
+
+ for key in keys:
+ data = await self.kvstore.get(key)
+ if key.endswith(":default"):
+ default_version = data
+ else:
+ if data:
+ prompt_obj = self._deserialize_prompt(data)
+ prompts.append(prompt_obj)
+
+ if not prompts:
+ raise ValueError(f"Prompt {prompt_id} not found")
+
+ for prompt in prompts:
+ prompt.is_default = prompt.version == default_version
+
+ prompts.sort(key=lambda x: int(x.version))
+ return ListPromptsResponse(data=prompts)
+
async def set_default_version(self, prompt_id: str, version: str) -> Prompt:
"""Set which version of a prompt should be the default (latest)."""
version_key = self._get_version_key(prompt_id, version)
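
The implementation leans on the key layout already used by the other prompt operations: one key per stored version under the prompts:v1:{prompt_id}: prefix, plus a :default key whose value is the version string that should win. The exact shape of the per-version keys comes from helpers not shown in this diff, so the layout below is an assumption used only to illustrate how is_default is derived during the range scan:

    # Illustrative only: a dict standing in for the kvstore contents of one prompt.
    kv = {
        "prompts:v1:pmpt_abc:1": '{"version": "1", ...}',   # serialized Prompt, v1
        "prompts:v1:pmpt_abc:2": '{"version": "2", ...}',   # serialized Prompt, v2
        "prompts:v1:pmpt_abc:default": "2",                 # default marker key
    }

    # list_prompt_versions walks every key in the range, remembers the default
    # marker separately, then stamps is_default onto each deserialized version:
    default_version = kv["prompts:v1:pmpt_abc:default"]
    versions = [k.rsplit(":", 1)[-1] for k in kv if not k.endswith(":default")]
    flags = {v: (v == default_version) for v in sorted(versions, key=int)}
    print(flags)  # {'1': False, '2': True}
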
diff --git a/tests/unit/providers/prompts/test_prompts.py b/tests/unit/providers/prompts/test_prompts.py
index 19c2563e5..47c154bf2 100644
--- a/tests/unit/providers/prompts/test_prompts.py
+++ b/tests/unit/providers/prompts/test_prompts.py
@@ -100,3 +100,20 @@ class TestPrompts:
response = await store.list_prompts()
listed_prompt = response.data[0]
assert listed_prompt.version == "1" and listed_prompt.prompt == "V1"
+
+ async def test_get_all_prompt_versions(self, store):
+ prompt = await store.create_prompt("V1")
+ await store.update_prompt(prompt.prompt_id, "V2")
+ await store.update_prompt(prompt.prompt_id, "V3")
+
+ versions = (await store.list_prompt_versions(prompt.prompt_id)).data
+ assert len(versions) == 3
+ assert [v.version for v in versions] == ["1", "2", "3"]
+ assert [v.is_default for v in versions] == [False, False, True]
+
+ await store.set_default_version(prompt.prompt_id, "2")
+ versions = (await store.list_prompt_versions(prompt.prompt_id)).data
+ assert [v.is_default for v in versions] == [False, True, False]
+
+ with pytest.raises(ValueError):
+ await store.list_prompt_versions("nonexistent")