diff --git a/docs/static/llama-stack-spec.html b/docs/static/llama-stack-spec.html
index 32ead1764..46d101414 100644
--- a/docs/static/llama-stack-spec.html
+++ b/docs/static/llama-stack-spec.html
@@ -9456,10 +9456,6 @@
"truncation": {
"type": "string",
"description": "(Optional) Truncation strategy applied to the response"
- },
- "user": {
- "type": "string",
- "description": "(Optional) User identifier associated with the request"
}
},
"additionalProperties": false,
@@ -13594,10 +13590,6 @@
"type": "string",
"description": "(Optional) Truncation strategy applied to the response"
},
- "user": {
- "type": "string",
- "description": "(Optional) User identifier associated with the request"
- },
"input": {
"type": "array",
"items": {
diff --git a/docs/static/llama-stack-spec.yaml b/docs/static/llama-stack-spec.yaml
index 3b5b92060..65bc9a0b4 100644
--- a/docs/static/llama-stack-spec.yaml
+++ b/docs/static/llama-stack-spec.yaml
@@ -6884,10 +6884,6 @@ components:
type: string
description: >-
(Optional) Truncation strategy applied to the response
- user:
- type: string
- description: >-
- (Optional) User identifier associated with the request
additionalProperties: false
required:
- created_at
@@ -10082,10 +10078,6 @@ components:
type: string
description: >-
(Optional) Truncation strategy applied to the response
- user:
- type: string
- description: >-
- (Optional) User identifier associated with the request
input:
type: array
items:
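The YAML spec mirrors the JSON document, so the same property disappears here. A quick sanity check with PyYAML; "OpenAIResponseObject" is an assumption about the component these hunks edit, inferred from the Python model below:

    import yaml  # assumes PyYAML is available

    with open("docs/static/llama-stack-spec.yaml") as f:
        spec = yaml.safe_load(f)

    # "OpenAIResponseObject" is assumed to be the schema edited by these hunks.
    props = spec["components"]["schemas"]["OpenAIResponseObject"]["properties"]
    assert "user" not in props
    assert "truncation" in props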
diff --git a/llama_stack/apis/agents/openai_responses.py b/llama_stack/apis/agents/openai_responses.py
index 591992479..b26b11f4f 100644
--- a/llama_stack/apis/agents/openai_responses.py
+++ b/llama_stack/apis/agents/openai_responses.py
@@ -336,7 +336,6 @@ class OpenAIResponseObject(BaseModel):
:param text: Text formatting configuration for the response
:param top_p: (Optional) Nucleus sampling parameter used for generation
:param truncation: (Optional) Truncation strategy applied to the response
- :param user: (Optional) User identifier associated with the request
"""
created_at: int
@@ -354,7 +353,6 @@ class OpenAIResponseObject(BaseModel):
text: OpenAIResponseText = OpenAIResponseText(format=OpenAIResponseTextFormat(type="text"))
top_p: float | None = None
truncation: str | None = None
- user: str | None = None


@json_schema_type
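On the model side, note what existing callers will see: under Pydantic v2's default config (extra="ignore"), a constructor call that still passes user= has the value silently discarded rather than rejected; if the real model forbids extras, it would raise a ValidationError instead. A minimal sketch with the field list trimmed to this hunk:

    from pydantic import BaseModel

    class OpenAIResponseObject(BaseModel):
        # Trimmed to the fields visible in this hunk.
        created_at: int
        top_p: float | None = None
        truncation: str | None = None
        # user: str | None = None  # removed by this patch

    # The extra kwarg is ignored under Pydantic's default config.
    resp = OpenAIResponseObject(created_at=0, user="u-123")
    print(resp.model_dump())  # {'created_at': 0, 'top_p': None, 'truncation': None}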
diff --git a/llama_stack/ui/app/logs/responses/[id]/page.tsx b/llama_stack/ui/app/logs/responses/[id]/page.tsx
index 922d35531..305e5752a 100644
--- a/llama_stack/ui/app/logs/responses/[id]/page.tsx
+++ b/llama_stack/ui/app/logs/responses/[id]/page.tsx
@@ -41,7 +41,6 @@ export default function ResponseDetailPage() {
temperature: responseData.temperature,
top_p: responseData.top_p,
truncation: responseData.truncation,
- user: responseData.user,
};
};

diff --git a/llama_stack/ui/components/responses/responses-table.tsx b/llama_stack/ui/components/responses/responses-table.tsx
index 0c0f8e56b..415e9ec2c 100644
--- a/llama_stack/ui/components/responses/responses-table.tsx
+++ b/llama_stack/ui/components/responses/responses-table.tsx
@@ -43,7 +43,6 @@ const convertResponseListData = (
temperature: responseData.temperature,
top_p: responseData.top_p,
truncation: responseData.truncation,
- user: responseData.user,
};
};
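Both UI converters drop the same line, so neither the responses table nor the detail page has a user value left to render. A hedged Python mirror of the converter shape (the real code above is TypeScript; fields trimmed to the ones in these hunks):

    def convert_response(response_data: dict) -> dict:
        # Mirrors the object literal the converters build after this patch:
        # there is no "user" key left to populate.
        return {
            "temperature": response_data.get("temperature"),
            "top_p": response_data.get("top_p"),
            "truncation": response_data.get("truncation"),
            # "user": response_data.get("user"),  # dropped with the API field
        }

    print(convert_response({"temperature": 0.7, "user": "u-123"}))
    # {'temperature': 0.7, 'top_p': None, 'truncation': None}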