diff --git a/docs/_static/llama-stack-spec.html b/docs/_static/llama-stack-spec.html
index a1f6a6f30..7cb2a73f3 100644
--- a/docs/_static/llama-stack-spec.html
+++ b/docs/_static/llama-stack-spec.html
@@ -4129,7 +4129,7 @@
"tags": [
"Files"
],
- "description": "Upload a file that can be used across various endpoints.\nThe file upload should be a multipart form request with:\n- file: The File object (not file name) to be uploaded.\n- purpose: The intended purpose of the uploaded file.",
+ "description": "Upload a file that can be used across various endpoints.\nThe file upload should be a multipart form request with:\n- file: The File object (not file name) to be uploaded.\n- purpose: The intended purpose of the uploaded file.\n- expires_after: Optional form values describing expiration for the file. Expected expires_after[anchor] = \"created_at\", expires_after[seconds] = {integer}. Seconds must be between 3600 and 2592000 (1 hour to 30 days).",
"parameters": [],
"requestBody": {
"content": {
@@ -4143,11 +4143,33 @@
},
"purpose": {
"$ref": "#/components/schemas/OpenAIFilePurpose"
+ },
+ "expires_after_anchor": {
+ "oneOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+ "expires_after_seconds": {
+ "oneOf": [
+ {
+ "type": "integer"
+ },
+ {
+ "type": "null"
+ }
+ ]
}
},
"required": [
"file",
- "purpose"
+ "purpose",
+ "expires_after_anchor",
+ "expires_after_seconds"
]
}
}
diff --git a/docs/_static/llama-stack-spec.yaml b/docs/_static/llama-stack-spec.yaml
index 33142e3ff..25089868c 100644
--- a/docs/_static/llama-stack-spec.yaml
+++ b/docs/_static/llama-stack-spec.yaml
@@ -2933,6 +2933,10 @@ paths:
- file: The File object (not file name) to be uploaded.
- purpose: The intended purpose of the uploaded file.
+
+ - expires_after: Optional form values describing expiration for the file.
+ Expected expires_after[anchor] = "created_at", expires_after[seconds] = {integer}.
+ Seconds must be between 3600 and 2592000 (1 hour to 30 days).
parameters: []
requestBody:
content:
@@ -2945,9 +2949,19 @@ paths:
format: binary
purpose:
$ref: '#/components/schemas/OpenAIFilePurpose'
+ expires_after_anchor:
+ oneOf:
+ - type: string
+ - type: 'null'
+ expires_after_seconds:
+ oneOf:
+ - type: integer
+ - type: 'null'
required:
- file
- purpose
+ - expires_after_anchor
+ - expires_after_seconds
required: true
/v1/openai/v1/models:
get:
diff --git a/llama_stack/apis/files/files.py b/llama_stack/apis/files/files.py
index a1b9dd4dc..d39e96e96 100644
--- a/llama_stack/apis/files/files.py
+++ b/llama_stack/apis/files/files.py
@@ -5,10 +5,10 @@
# the root directory of this source tree.
from enum import StrEnum
-from typing import Annotated, Literal, Protocol, runtime_checkable
+from typing import Annotated, ClassVar, Literal, Protocol, runtime_checkable
from fastapi import File, Form, Response, UploadFile
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
from llama_stack.apis.common.responses import Order
from llama_stack.providers.utils.telemetry.trace_protocol import trace_protocol
@@ -49,6 +49,23 @@ class OpenAIFileObject(BaseModel):
purpose: OpenAIFilePurpose
+@json_schema_type
+class ExpiresAfter(BaseModel):
+ """
+ Control expiration of uploaded files.
+
+ Params:
+ - anchor, must be "created_at"
+ - seconds, must be int between 3600 and 2592000 (1 hour to 30 days)
+ """
+
+ MIN: ClassVar[int] = 3600 # 1 hour
+ MAX: ClassVar[int] = 2592000 # 30 days
+
+ anchor: Literal["created_at"]
+ seconds: int = Field(..., ge=3600, le=2592000)
+
+
@json_schema_type
class ListOpenAIFileResponse(BaseModel):
"""
@@ -92,6 +109,9 @@ class Files(Protocol):
self,
file: Annotated[UploadFile, File()],
purpose: Annotated[OpenAIFilePurpose, Form()],
+ expires_after_anchor: Annotated[str | None, Form(alias="expires_after[anchor]")] = None,
+ expires_after_seconds: Annotated[int | None, Form(alias="expires_after[seconds]")] = None,
+ # TODO: expires_after is producing strange openapi spec, params are showing up as a required w/ oneOf being null
) -> OpenAIFileObject:
"""
Upload a file that can be used across various endpoints.
@@ -99,6 +119,7 @@ class Files(Protocol):
The file upload should be a multipart form request with:
- file: The File object (not file name) to be uploaded.
- purpose: The intended purpose of the uploaded file.
+ - expires_after: Optional form values describing expiration for the file. Expected expires_after[anchor] = "created_at", expires_after[seconds] = {integer}. Seconds must be between 3600 and 2592000 (1 hour to 30 days).
:param file: The uploaded file object containing content and metadata (filename, content_type, etc.).
:param purpose: The intended purpose of the uploaded file (e.g., "assistants", "fine-tune").
diff --git a/llama_stack/providers/inline/files/localfs/files.py b/llama_stack/providers/inline/files/localfs/files.py
index 4f6d571a4..9c610c1ba 100644
--- a/llama_stack/providers/inline/files/localfs/files.py
+++ b/llama_stack/providers/inline/files/localfs/files.py
@@ -86,11 +86,16 @@ class LocalfsFilesImpl(Files):
self,
file: Annotated[UploadFile, File()],
purpose: Annotated[OpenAIFilePurpose, Form()],
+ expires_after_anchor: Annotated[str | None, Form(alias="expires_after[anchor]")] = None,
+ expires_after_seconds: Annotated[int | None, Form(alias="expires_after[seconds]")] = None,
) -> OpenAIFileObject:
"""Upload a file that can be used across various endpoints."""
if not self.sql_store:
raise RuntimeError("Files provider not initialized")
+ if expires_after_anchor is not None or expires_after_seconds is not None:
+ raise NotImplementedError("File expiration is not supported by this provider")
+
file_id = self._generate_file_id()
file_path = self._get_file_path(file_id)
diff --git a/llama_stack/providers/remote/files/s3/files.py b/llama_stack/providers/remote/files/s3/files.py
index 0451f74ea..54742d900 100644
--- a/llama_stack/providers/remote/files/s3/files.py
+++ b/llama_stack/providers/remote/files/s3/files.py
@@ -4,9 +4,9 @@
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.
-import time
import uuid
-from typing import Annotated
+from datetime import UTC, datetime
+from typing import Annotated, Any
import boto3
from botocore.exceptions import BotoCoreError, ClientError, NoCredentialsError
@@ -15,6 +15,7 @@ from fastapi import File, Form, Response, UploadFile
from llama_stack.apis.common.errors import ResourceNotFoundError
from llama_stack.apis.common.responses import Order
from llama_stack.apis.files import (
+ ExpiresAfter,
Files,
ListOpenAIFileResponse,
OpenAIFileDeleteResponse,
@@ -85,18 +86,80 @@ async def _create_bucket_if_not_exists(client: boto3.client, config: S3FilesImpl
raise RuntimeError(f"Failed to access S3 bucket '{config.bucket_name}': {e}") from e
+def _make_file_object(
+ *,
+ id: str,
+ filename: str,
+ purpose: str,
+ bytes: int,
+ created_at: int,
+ expires_at: int,
+ **kwargs: Any, # here to ignore any additional fields, e.g. extra fields from AuthorizedSqlStore
+) -> OpenAIFileObject:
+ """
+ Construct an OpenAIFileObject and normalize expires_at.
+
+ If expires_at is greater than the max we treat it as no-expiration and
+ return None for expires_at.
+
+ The OpenAI spec says expires_at type is Integer, but the implementation
+ will return None for no expiration.
+ """
+ obj = OpenAIFileObject(
+ id=id,
+ filename=filename,
+ purpose=OpenAIFilePurpose(purpose),
+ bytes=bytes,
+ created_at=created_at,
+ expires_at=expires_at,
+ )
+
+ if obj.expires_at is not None and obj.expires_at > (obj.created_at + ExpiresAfter.MAX):
+ obj.expires_at = None # type: ignore
+
+ return obj
+
+
class S3FilesImpl(Files):
"""S3-based implementation of the Files API."""
- # TODO: implement expiration, for now a silly offset
- _SILLY_EXPIRATION_OFFSET = 100 * 365 * 24 * 60 * 60
-
def __init__(self, config: S3FilesImplConfig, policy: list[AccessRule]) -> None:
self._config = config
self.policy = policy
self._client: boto3.client | None = None
self._sql_store: AuthorizedSqlStore | None = None
+ def _now(self) -> int:
+ """Return current UTC timestamp as int seconds."""
+ return int(datetime.now(UTC).timestamp())
+
+ async def _get_file(self, file_id: str, return_expired: bool = False) -> dict[str, Any]:
+ where: dict[str, str | dict] = {"id": file_id}
+ if not return_expired:
+ where["expires_at"] = {">": self._now()}
+ if not (row := await self.sql_store.fetch_one("openai_files", policy=self.policy, where=where)):
+ raise ResourceNotFoundError(file_id, "File", "files.list()")
+ return row
+
+ async def _delete_file(self, file_id: str) -> None:
+ """Delete a file from S3 and the database."""
+ try:
+ self.client.delete_object(
+ Bucket=self._config.bucket_name,
+ Key=file_id,
+ )
+ except ClientError as e:
+ if e.response["Error"]["Code"] != "NoSuchKey":
+ raise RuntimeError(f"Failed to delete file from S3: {e}") from e
+
+ await self.sql_store.delete("openai_files", where={"id": file_id})
+
+ async def _delete_if_expired(self, file_id: str) -> None:
+ """If the file exists and is expired, delete it."""
+ if row := await self._get_file(file_id, return_expired=True):
+ if (expires_at := row.get("expires_at")) and expires_at <= self._now():
+ await self._delete_file(file_id)
+
async def initialize(self) -> None:
self._client = _create_s3_client(self._config)
await _create_bucket_if_not_exists(self._client, self._config)
@@ -132,27 +195,47 @@ class S3FilesImpl(Files):
self,
file: Annotated[UploadFile, File()],
purpose: Annotated[OpenAIFilePurpose, Form()],
+ expires_after_anchor: Annotated[str | None, Form(alias="expires_after[anchor]")] = None,
+ expires_after_seconds: Annotated[int | None, Form(alias="expires_after[seconds]")] = None,
) -> OpenAIFileObject:
file_id = f"file-{uuid.uuid4().hex}"
filename = getattr(file, "filename", None) or "uploaded_file"
- created_at = int(time.time())
- expires_at = created_at + self._SILLY_EXPIRATION_OFFSET
+ created_at = self._now()
+
+ expires_after = None
+ if expires_after_anchor is not None or expires_after_seconds is not None:
+ # we use ExpiresAfter to validate input
+ expires_after = ExpiresAfter(
+ anchor=expires_after_anchor, # type: ignore[arg-type]
+ seconds=expires_after_seconds, # type: ignore[arg-type]
+ )
+
+ # the default is no expiration.
+ # to implement no expiration we set an expiration beyond the max.
+ # we'll hide this fact from users when returning the file object.
+ expires_at = created_at + ExpiresAfter.MAX * 42
+ # the default for BATCH files is 30 days, which happens to be the expiration max.
+ if purpose == OpenAIFilePurpose.BATCH:
+ expires_at = created_at + ExpiresAfter.MAX
+
+ if expires_after is not None:
+ expires_at = created_at + expires_after.seconds
+
content = await file.read()
file_size = len(content)
- await self.sql_store.insert(
- "openai_files",
- {
- "id": file_id,
- "filename": filename,
- "purpose": purpose.value,
- "bytes": file_size,
- "created_at": created_at,
- "expires_at": expires_at,
- },
- )
+ entry: dict[str, Any] = {
+ "id": file_id,
+ "filename": filename,
+ "purpose": purpose.value,
+ "bytes": file_size,
+ "created_at": created_at,
+ "expires_at": expires_at,
+ }
+
+ await self.sql_store.insert("openai_files", entry)
try:
self.client.put_object(
@@ -166,14 +249,7 @@ class S3FilesImpl(Files):
raise RuntimeError(f"Failed to upload file to S3: {e}") from e
- return OpenAIFileObject(
- id=file_id,
- filename=filename,
- purpose=purpose,
- bytes=file_size,
- created_at=created_at,
- expires_at=expires_at,
- )
+ return _make_file_object(**entry)
async def openai_list_files(
self,
@@ -186,30 +262,20 @@ class S3FilesImpl(Files):
if not order:
order = Order.desc
- where_conditions = {}
+ where_conditions: dict[str, Any] = {"expires_at": {">": self._now()}}
if purpose:
where_conditions["purpose"] = purpose.value
paginated_result = await self.sql_store.fetch_all(
table="openai_files",
policy=self.policy,
- where=where_conditions if where_conditions else None,
+ where=where_conditions,
order_by=[("created_at", order.value)],
cursor=("id", after) if after else None,
limit=limit,
)
- files = [
- OpenAIFileObject(
- id=row["id"],
- filename=row["filename"],
- purpose=OpenAIFilePurpose(row["purpose"]),
- bytes=row["bytes"],
- created_at=row["created_at"],
- expires_at=row["expires_at"],
- )
- for row in paginated_result.data
- ]
+ files = [_make_file_object(**row) for row in paginated_result.data]
return ListOpenAIFileResponse(
data=files,
@@ -220,41 +286,20 @@ class S3FilesImpl(Files):
)
async def openai_retrieve_file(self, file_id: str) -> OpenAIFileObject:
- row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
- if not row:
- raise ResourceNotFoundError(file_id, "File", "files.list()")
-
- return OpenAIFileObject(
- id=row["id"],
- filename=row["filename"],
- purpose=OpenAIFilePurpose(row["purpose"]),
- bytes=row["bytes"],
- created_at=row["created_at"],
- expires_at=row["expires_at"],
- )
+ await self._delete_if_expired(file_id)
+ row = await self._get_file(file_id)
+ return _make_file_object(**row)
async def openai_delete_file(self, file_id: str) -> OpenAIFileDeleteResponse:
- row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
- if not row:
- raise ResourceNotFoundError(file_id, "File", "files.list()")
-
- try:
- self.client.delete_object(
- Bucket=self._config.bucket_name,
- Key=row["id"],
- )
- except ClientError as e:
- if e.response["Error"]["Code"] != "NoSuchKey":
- raise RuntimeError(f"Failed to delete file from S3: {e}") from e
-
- await self.sql_store.delete("openai_files", where={"id": file_id})
-
+ await self._delete_if_expired(file_id)
+ _ = await self._get_file(file_id) # raises if not found
+ await self._delete_file(file_id)
return OpenAIFileDeleteResponse(id=file_id, deleted=True)
async def openai_retrieve_file_content(self, file_id: str) -> Response:
- row = await self.sql_store.fetch_one("openai_files", policy=self.policy, where={"id": file_id})
- if not row:
- raise ResourceNotFoundError(file_id, "File", "files.list()")
+ await self._delete_if_expired(file_id)
+
+ row = await self._get_file(file_id)
try:
response = self.client.get_object(
@@ -265,7 +310,7 @@ class S3FilesImpl(Files):
content = response["Body"].read()
except ClientError as e:
if e.response["Error"]["Code"] == "NoSuchKey":
- await self.sql_store.delete("openai_files", where={"id": file_id})
+ await self._delete_file(file_id)
raise ResourceNotFoundError(file_id, "File", "files.list()") from e
raise RuntimeError(f"Failed to download file from S3: {e}") from e
diff --git a/pyproject.toml b/pyproject.toml
index aa1813e49..1f87a3aaa 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -33,7 +33,7 @@ dependencies = [
"jsonschema",
"llama-stack-client>=0.2.20",
"llama-api-client>=0.1.2",
- "openai>=1.99.6,<1.100.0",
+ "openai>=1.99.6",
"prompt-toolkit",
"python-dotenv",
"python-jose[cryptography]",
@@ -106,7 +106,7 @@ unit = [
# separately. If you are using "uv" to execute your tests, you can use the "--group" flag to specify extra
# dependencies.
test = [
- "openai",
+ "openai>=1.100.0", # for expires_after support
"aiosqlite",
"aiohttp",
"torch>=2.6.0",
diff --git a/tests/integration/files/test_files.py b/tests/integration/files/test_files.py
index 67351d4f7..516b0bd98 100644
--- a/tests/integration/files/test_files.py
+++ b/tests/integration/files/test_files.py
@@ -8,6 +8,7 @@ from io import BytesIO
from unittest.mock import patch
import pytest
+import requests
from llama_stack.core.datatypes import User
@@ -79,6 +80,88 @@ def test_openai_client_basic_operations(openai_client):
pass # ignore 404
+@pytest.mark.xfail(reason="expires_after not available on all providers")
+def test_expires_after(openai_client):
+ """Test uploading a file with expires_after parameter."""
+ client = openai_client
+
+ uploaded_file = None
+ try:
+ with BytesIO(b"expires_after test") as file_buffer:
+ file_buffer.name = "expires_after.txt"
+ uploaded_file = client.files.create(
+ file=file_buffer,
+ purpose="assistants",
+ expires_after={"anchor": "created_at", "seconds": 4545},
+ )
+
+ assert uploaded_file.expires_at is not None
+ assert uploaded_file.expires_at == uploaded_file.created_at + 4545
+
+ listed = client.files.list()
+ ids = [f.id for f in listed.data]
+ assert uploaded_file.id in ids
+
+ retrieved = client.files.retrieve(uploaded_file.id)
+ assert retrieved.id == uploaded_file.id
+
+ finally:
+ if uploaded_file is not None:
+ try:
+ client.files.delete(uploaded_file.id)
+ except Exception:
+ pass
+
+
+@pytest.mark.xfail(reason="expires_after not available on all providers")
+def test_expires_after_requests(openai_client):
+ """Upload a file using requests multipart/form-data and bracketed expires_after fields.
+
+ This ensures clients that send form fields like `expires_after[anchor]` and
+ `expires_after[seconds]` are handled by the server.
+ """
+ base_url = f"{openai_client.base_url}files"
+
+ uploaded_id = None
+ try:
+ files = {"file": ("expires_after_with_requests.txt", BytesIO(b"expires_after via requests"))}
+ data = {
+ "purpose": "assistants",
+ "expires_after[anchor]": "created_at",
+ "expires_after[seconds]": "4545",
+ }
+
+ session = requests.Session()
+ request = requests.Request("POST", base_url, files=files, data=data)
+ prepared = session.prepare_request(request)
+ resp = session.send(prepared, timeout=30)
+ resp.raise_for_status()
+ result = resp.json()
+
+ assert result.get("id", "").startswith("file-")
+ uploaded_id = result["id"]
+ assert result.get("created_at") is not None
+ assert result.get("expires_at") == result["created_at"] + 4545
+
+ list_resp = requests.get(base_url, timeout=30)
+ list_resp.raise_for_status()
+ listed = list_resp.json()
+ ids = [f["id"] for f in listed.get("data", [])]
+ assert uploaded_id in ids
+
+ retrieve_resp = requests.get(f"{base_url}/{uploaded_id}", timeout=30)
+ retrieve_resp.raise_for_status()
+ retrieved = retrieve_resp.json()
+ assert retrieved["id"] == uploaded_id
+
+ finally:
+ if uploaded_id:
+ try:
+ requests.delete(f"{base_url}/{uploaded_id}", timeout=30)
+ except Exception:
+ pass
+
+
@pytest.mark.xfail(message="User isolation broken for current providers, must be fixed.")
@patch("llama_stack.providers.utils.sqlstore.authorized_sqlstore.get_authenticated_user")
def test_files_authentication_isolation(mock_get_authenticated_user, llama_stack_client):
diff --git a/tests/unit/providers/files/test_s3_files.py b/tests/unit/providers/files/test_s3_files.py
index 3bd4836df..c665bf124 100644
--- a/tests/unit/providers/files/test_s3_files.py
+++ b/tests/unit/providers/files/test_s3_files.py
@@ -197,3 +197,104 @@ class TestS3FilesImpl:
files_list = await s3_provider.openai_list_files()
assert len(files_list.data) == 0, "No file metadata should remain after failed upload"
+
+ @pytest.mark.parametrize("purpose", [p for p in OpenAIFilePurpose if p != OpenAIFilePurpose.BATCH])
+ async def test_default_no_expiration(self, s3_provider, sample_text_file, purpose):
+ """Test that by default files have no expiration."""
+ sample_text_file.filename = "test_default_no_expiration"
+ uploaded = await s3_provider.openai_upload_file(
+ file=sample_text_file,
+ purpose=purpose,
+ )
+ assert uploaded.expires_at is None, "By default files should have no expiration"
+
+ async def test_default_batch_expiration(self, s3_provider, sample_text_file):
+ """Test that by default batch files have an expiration."""
+ sample_text_file.filename = "test_default_batch_an_expiration"
+ uploaded = await s3_provider.openai_upload_file(
+ file=sample_text_file,
+ purpose=OpenAIFilePurpose.BATCH,
+ )
+ assert uploaded.expires_at is not None, "By default batch files should have an expiration"
+ thirty_days_seconds = 30 * 24 * 3600
+ assert uploaded.expires_at == uploaded.created_at + thirty_days_seconds, (
+ "Batch default expiration should be 30 days"
+ )
+
+ async def test_expired_file_is_unavailable(self, s3_provider, sample_text_file, s3_config, s3_client):
+ """Uploaded file that has expired should not be listed or retrievable/deletable."""
+ with patch.object(s3_provider, "_now") as mock_now: # control time
+ two_hours = 2 * 60 * 60
+
+ mock_now.return_value = 0
+
+ sample_text_file.filename = "test_expired_file"
+ uploaded = await s3_provider.openai_upload_file(
+ file=sample_text_file,
+ purpose=OpenAIFilePurpose.ASSISTANTS,
+ expires_after_anchor="created_at",
+ expires_after_seconds=two_hours,
+ )
+
+ mock_now.return_value = two_hours * 2 # fast forward 4 hours
+
+ listed = await s3_provider.openai_list_files()
+ assert uploaded.id not in [f.id for f in listed.data]
+
+ with pytest.raises(ResourceNotFoundError, match="not found"):
+ await s3_provider.openai_retrieve_file(uploaded.id)
+
+ with pytest.raises(ResourceNotFoundError, match="not found"):
+ await s3_provider.openai_retrieve_file_content(uploaded.id)
+
+ with pytest.raises(ResourceNotFoundError, match="not found"):
+ await s3_provider.openai_delete_file(uploaded.id)
+
+ with pytest.raises(ClientError) as exc_info:
+ s3_client.head_object(Bucket=s3_config.bucket_name, Key=uploaded.id)
+ assert exc_info.value.response["Error"]["Code"] == "404"
+
+ with pytest.raises(ResourceNotFoundError, match="not found"):
+ await s3_provider._get_file(uploaded.id, return_expired=True)
+
+ async def test_unsupported_expires_after_anchor(self, s3_provider, sample_text_file):
+ """Unsupported anchor value should raise ValueError."""
+ sample_text_file.filename = "test_unsupported_expires_after_anchor"
+
+ with pytest.raises(ValueError, match="Input should be 'created_at'"):
+ await s3_provider.openai_upload_file(
+ file=sample_text_file,
+ purpose=OpenAIFilePurpose.ASSISTANTS,
+ expires_after_anchor="now",
+ expires_after_seconds=3600,
+ )
+
+ async def test_nonint_expires_after_seconds(self, s3_provider, sample_text_file):
+ """Non-integer seconds in expires_after should raise ValueError."""
+ sample_text_file.filename = "test_nonint_expires_after_seconds"
+
+ with pytest.raises(ValueError, match="should be a valid integer"):
+ await s3_provider.openai_upload_file(
+ file=sample_text_file,
+ purpose=OpenAIFilePurpose.ASSISTANTS,
+ expires_after_anchor="created_at",
+ expires_after_seconds="many",
+ )
+
+ async def test_expires_after_seconds_out_of_bounds(self, s3_provider, sample_text_file):
+ """Seconds outside allowed range should raise ValueError."""
+ with pytest.raises(ValueError, match="greater than or equal to 3600"):
+ await s3_provider.openai_upload_file(
+ file=sample_text_file,
+ purpose=OpenAIFilePurpose.ASSISTANTS,
+ expires_after_anchor="created_at",
+ expires_after_seconds=3599,
+ )
+
+ with pytest.raises(ValueError, match="less than or equal to 2592000"):
+ await s3_provider.openai_upload_file(
+ file=sample_text_file,
+ purpose=OpenAIFilePurpose.ASSISTANTS,
+ expires_after_anchor="created_at",
+ expires_after_seconds=2592001,
+ )
diff --git a/uv.lock b/uv.lock
index 6eac1efb7..73b52a3e9 100644
--- a/uv.lock
+++ b/uv.lock
@@ -1909,7 +1909,7 @@ requires-dist = [
{ name = "llama-api-client", specifier = ">=0.1.2" },
{ name = "llama-stack-client", specifier = ">=0.2.20" },
{ name = "llama-stack-client", marker = "extra == 'ui'", specifier = ">=0.2.20" },
- { name = "openai", specifier = ">=1.99.6,<1.100.0" },
+ { name = "openai", specifier = ">=1.99.6" },
{ name = "opentelemetry-exporter-otlp-proto-http", specifier = ">=1.30.0" },
{ name = "opentelemetry-sdk", specifier = ">=1.30.0" },
{ name = "pandas", marker = "extra == 'ui'" },
@@ -1979,7 +1979,7 @@ test = [
{ name = "datasets" },
{ name = "mcp" },
{ name = "milvus-lite", specifier = ">=2.5.0" },
- { name = "openai" },
+ { name = "openai", specifier = ">=1.100.0" },
{ name = "psycopg2-binary", specifier = ">=2.9.0" },
{ name = "pymilvus", specifier = ">=2.5.12" },
{ name = "pypdf" },
@@ -2638,7 +2638,7 @@ wheels = [
[[package]]
name = "openai"
-version = "1.99.6"
+version = "1.102.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -2650,9 +2650,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/11/45/38a87bd6949236db5ae3132f41d5861824702b149f86d2627d6900919103/openai-1.99.6.tar.gz", hash = "sha256:f48f4239b938ef187062f3d5199a05b69711d8b600b9a9b6a3853cd271799183", size = 505364, upload-time = "2025-08-09T15:20:54.438Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/07/55/da5598ed5c6bdd9939633854049cddc5cbac0da938dfcfcb3c6b119c16c0/openai-1.102.0.tar.gz", hash = "sha256:2e0153bcd64a6523071e90211cbfca1f2bbc5ceedd0993ba932a5869f93b7fc9", size = 519027, upload-time = "2025-08-26T20:50:29.397Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/d6/dd/9aa956485c2856346b3181542fbb0aea4e5b457fa7a523944726746da8da/openai-1.99.6-py3-none-any.whl", hash = "sha256:e40d44b2989588c45ce13819598788b77b8fb80ba2f7ae95ce90d14e46f1bd26", size = 786296, upload-time = "2025-08-09T15:20:51.95Z" },
+ { url = "https://files.pythonhosted.org/packages/bd/0d/c9e7016d82c53c5b5e23e2bad36daebb8921ed44f69c0a985c6529a35106/openai-1.102.0-py3-none-any.whl", hash = "sha256:d751a7e95e222b5325306362ad02a7aa96e1fab3ed05b5888ce1c7ca63451345", size = 812015, upload-time = "2025-08-26T20:50:27.219Z" },
]
[[package]]