add readme and keep only stubs

Swapna Lekkala 2025-11-04 10:42:00 -08:00
parent 0a98415e25
commit 9cf349a263
12 changed files with 328 additions and 1109 deletions


@@ -7,13 +7,13 @@
-import asyncio
 import tempfile
 from pathlib import Path
+from unittest.mock import AsyncMock, MagicMock

 import pytest

 from llama_stack.core.storage.datatypes import KVStoreReference, SqliteKVStoreConfig
 from llama_stack.providers.utils.job_scheduler import (
     InlineSchedulerConfig,
     JobStatus,
     scheduler_impl,
 )
 from llama_stack.providers.utils.kvstore import register_kvstore_backends
@@ -43,238 +43,72 @@ def scheduler_config():
     )
-async def test_inline_scheduler_basic(scheduler_config):
-    """Test basic scheduler functionality."""
+async def test_scheduler_api_exists(scheduler_config):
+    """Test that scheduler API is properly defined."""
     scheduler = await scheduler_impl(scheduler_config)
-    try:
-        # Register default executor
+    # Verify all required methods exist
+    assert hasattr(scheduler, "initialize")
+    assert hasattr(scheduler, "start")
+    assert hasattr(scheduler, "shutdown")
+    assert hasattr(scheduler, "register_job_executor")
+    assert hasattr(scheduler, "schedule_job")
+    assert hasattr(scheduler, "get_job_info")
+    assert hasattr(scheduler, "cancel_job")
+    assert hasattr(scheduler, "delete_job")
+    assert hasattr(scheduler, "list_jobs")
+async def test_scheduler_not_implemented(scheduler_config):
+    """Test that scheduler methods raise NotImplementedError."""
+    scheduler = await scheduler_impl(scheduler_config)
+    # Test that all methods raise NotImplementedError
+    with pytest.raises(NotImplementedError, match="not yet available"):
+        await scheduler.initialize()
+    with pytest.raises(NotImplementedError, match="not yet available"):
+        await scheduler.start()
+    with pytest.raises(NotImplementedError, match="not yet available"):
         scheduler.register_job_executor("test_job", default_test_executor)
-        # Schedule a job
-        job_id = await scheduler.schedule_job(
-            job_type="test_job",
-            job_data={"test": "data"},
-            metadata={"user": "test_user"},
-        )
+    with pytest.raises(NotImplementedError, match="not yet available"):
+        await scheduler.schedule_job("test_job", {})
-        assert job_id is not None
-        assert isinstance(job_id, str)
+    with pytest.raises(NotImplementedError, match="not yet available"):
+        await scheduler.get_job_info("job_id")
-        # Wait a bit for the job to complete
-        await asyncio.sleep(0.2)
+    with pytest.raises(NotImplementedError, match="not yet available"):
+        await scheduler.cancel_job("job_id")
-        # Get job info
-        job_info = await scheduler.get_job_info(job_id)
-        assert job_info["job_id"] == job_id
-        assert job_info["job_type"] == "test_job"
-        assert job_info["status"] == JobStatus.COMPLETED.value
-        assert job_info["metadata"]["user"] == "test_user"
-        assert job_info["progress"] == 1.0
-        assert job_info["result"] is not None
+    with pytest.raises(NotImplementedError, match="not yet available"):
+        await scheduler.delete_job("job_id")
-    finally:
+    with pytest.raises(NotImplementedError, match="not yet available"):
+        await scheduler.list_jobs()
+    with pytest.raises(NotImplementedError, match="not yet available"):
         await scheduler.shutdown()
-async def test_inline_scheduler_list_jobs(scheduler_config):
-    """Test listing jobs with filters."""
+async def test_two_phase_initialization_pattern(scheduler_config):
+    """Test that the two-phase initialization pattern is supported."""
     scheduler = await scheduler_impl(scheduler_config)
-    try:
-        # Register executors for different job types
-        scheduler.register_job_executor("batch_processing", default_test_executor)
-        scheduler.register_job_executor("log_aggregation", default_test_executor)
+    # Mock the methods to test the pattern
+    scheduler.initialize = AsyncMock()
+    scheduler.start = AsyncMock()
+    scheduler.register_job_executor = MagicMock()
-        # Schedule multiple jobs
-        await scheduler.schedule_job(
-            job_type="batch_processing",
-            job_data={"batch": 1},
-        )
-        await scheduler.schedule_job(
-            job_type="log_aggregation",
-            job_data={"logs": []},
-        )
-        await scheduler.schedule_job(
-            job_type="batch_processing",
-            job_data={"batch": 2},
-        )
+    # Phase 1: Initialize (loads jobs, doesn't start them)
+    await scheduler.initialize()
+    scheduler.initialize.assert_called_once()
-        # Wait for jobs to complete
-        await asyncio.sleep(0.3)
+    # Register executors after initialization
+    scheduler.register_job_executor("test_job", default_test_executor)
+    scheduler.register_job_executor.assert_called_once()
-        # List all jobs
-        all_jobs = await scheduler.list_jobs()
-        assert len(all_jobs) == 3
-        # List jobs by type
-        batch_jobs = await scheduler.list_jobs(job_type="batch_processing")
-        assert len(batch_jobs) == 2
-        log_jobs = await scheduler.list_jobs(job_type="log_aggregation")
-        assert len(log_jobs) == 1
-        # List jobs by status
-        completed_jobs = await scheduler.list_jobs(status=JobStatus.COMPLETED)
-        assert len(completed_jobs) == 3
-    finally:
-        await scheduler.shutdown()
-async def test_inline_scheduler_cancel_job(scheduler_config):
-    """Test cancelling a job."""
-    scheduler = await scheduler_impl(scheduler_config)
-    try:
-        # Register executor
-        scheduler.register_job_executor("long_running_job", default_test_executor)
-        # Schedule a job
-        job_id = await scheduler.schedule_job(
-            job_type="long_running_job",
-            job_data={"duration": 10},
-        )
-        # Try to cancel immediately
-        await scheduler.cancel_job(job_id)
-        # Wait a bit
-        await asyncio.sleep(0.1)
-        # Check job status
-        job_info = await scheduler.get_job_info(job_id)
-        # Job might be CANCELLED or COMPLETED depending on timing
-        assert job_info["status"] in [JobStatus.CANCELLED.value, JobStatus.COMPLETED.value]
-    finally:
-        await scheduler.shutdown()
-async def test_inline_scheduler_delete_job(scheduler_config):
-    """Test deleting a completed job."""
-    scheduler = await scheduler_impl(scheduler_config)
-    try:
-        # Register executor
-        scheduler.register_job_executor("test_job", default_test_executor)
-        # Schedule a job
-        job_id = await scheduler.schedule_job(
-            job_type="test_job",
-            job_data={"test": "data"},
-        )
-        # Wait for completion
-        await asyncio.sleep(0.2)
-        # Verify job exists
-        job_info = await scheduler.get_job_info(job_id)
-        assert job_info["status"] == JobStatus.COMPLETED.value
-        # Delete the job
-        success = await scheduler.delete_job(job_id)
-        assert success is True
-        # Verify job is deleted
-        with pytest.raises(ValueError, match="not found"):
-            await scheduler.get_job_info(job_id)
-        # Deleting again should return False
-        success = await scheduler.delete_job(job_id)
-        assert success is False
-    finally:
-        await scheduler.shutdown()
-async def test_inline_scheduler_concurrent_jobs(scheduler_config):
-    """Test running multiple jobs concurrently."""
-    scheduler = await scheduler_impl(scheduler_config)
-    try:
-        # Register executor
-        scheduler.register_job_executor("test_job", default_test_executor)
-        # Schedule multiple jobs
-        job_ids = []
-        for i in range(5):
-            job_id = await scheduler.schedule_job(
-                job_type="test_job",
-                job_data={"index": i},
-            )
-            job_ids.append(job_id)
-        # Wait for all jobs to complete
-        await asyncio.sleep(0.5)
-        # Verify all jobs completed
-        for job_id in job_ids:
-            job_info = await scheduler.get_job_info(job_id)
-            assert job_info["status"] == JobStatus.COMPLETED.value
-    finally:
-        await scheduler.shutdown()
-async def test_inline_scheduler_pagination(scheduler_config):
-    """Test job listing with pagination."""
-    scheduler = await scheduler_impl(scheduler_config)
-    try:
-        # Register executor
-        scheduler.register_job_executor("test_job", default_test_executor)
-        # Schedule 10 jobs
-        for i in range(10):
-            await scheduler.schedule_job(
-                job_type="test_job",
-                job_data={"index": i},
-            )
-        # Wait for completion
-        await asyncio.sleep(0.5)
-        # Test pagination
-        page1 = await scheduler.list_jobs(limit=5, offset=0)
-        assert len(page1) == 5
-        page2 = await scheduler.list_jobs(limit=5, offset=5)
-        assert len(page2) == 5
-        page3 = await scheduler.list_jobs(limit=5, offset=10)
-        assert len(page3) == 0
-    finally:
-        await scheduler.shutdown()
-async def test_inline_scheduler_register_job_executor(scheduler_config):
-    """Test registering a job executor."""
-    scheduler = await scheduler_impl(scheduler_config)
-    try:
-        # Define a custom job executor
-        async def custom_executor(job_data: dict) -> dict:
-            return {"custom": "result", "input": job_data}
-        # Register the executor
-        scheduler.register_job_executor("custom_job_type", custom_executor)
-        # Schedule a job with the custom type
-        job_id = await scheduler.schedule_job(
-            job_type="custom_job_type",
-            job_data={"test": "data"},
-        )
-        # Wait for job to complete
-        await asyncio.sleep(0.2)
-        # Verify the custom executor was called
-        job_info = await scheduler.get_job_info(job_id)
-        assert job_info["status"] == JobStatus.COMPLETED.value
-        assert job_info["result"]["custom"] == "result"
-        assert job_info["result"]["input"]["test"] == "data"
-    finally:
-        await scheduler.shutdown()
+    # Phase 2: Start (resumes jobs after executors registered)
+    await scheduler.start()
+    scheduler.start.assert_called_once()
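
For readers coming to this diff cold: the retained tests only pin down the public surface of the job scheduler stub. A minimal sketch that would satisfy test_scheduler_not_implemented is shown below. It is not part of this commit: the class name StubJobScheduler and the exact message wording are assumptions; only the method names, their sync/async split, and the "not yet available" phrase come from the tests above, and however scheduler_impl actually constructs the provider may differ.

# Hypothetical stub satisfying the NotImplementedError contract exercised above.
# Only the method names and the "not yet available" message come from the tests;
# everything else here is an illustrative assumption.
class StubJobScheduler:
    async def initialize(self) -> None:
        raise NotImplementedError("initialize is not yet available")

    async def start(self) -> None:
        raise NotImplementedError("start is not yet available")

    async def shutdown(self) -> None:
        raise NotImplementedError("shutdown is not yet available")

    # Called without await in the tests, so it is synchronous.
    def register_job_executor(self, job_type, executor) -> None:
        raise NotImplementedError("register_job_executor is not yet available")

    async def schedule_job(self, job_type, job_data, metadata=None) -> str:
        raise NotImplementedError("schedule_job is not yet available")

    async def get_job_info(self, job_id) -> dict:
        raise NotImplementedError("get_job_info is not yet available")

    async def cancel_job(self, job_id) -> None:
        raise NotImplementedError("cancel_job is not yet available")

    async def delete_job(self, job_id) -> bool:
        raise NotImplementedError("delete_job is not yet available")

    async def list_jobs(self, job_type=None, status=None, limit=None, offset=None) -> list:
        raise NotImplementedError("list_jobs is not yet available")

Note that test_two_phase_initialization_pattern only asserts call counts on mocks, so the intended lifecycle (initialize to load persisted jobs, register executors, then start to resume them) is documented by the comments in that test rather than enforced by the stub.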