feat: drop python 3.10 support (#2469)

# What does this PR do?

Dropped Python 3.10 support, updated `pyproject.toml` and dependencies, and removed blocks of code with special handling for `enum.StrEnum`, which is part of the standard library as of Python 3.11.

Closes #2458
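
For context, here is a minimal sketch (not code from this repo) of the two Python 3.11+ idioms this change relies on; the enum name and values below are hypothetical, for illustration only:

```python
from datetime import UTC, datetime  # UTC was added in Python 3.11 as an alias for timezone.utc
from enum import StrEnum  # StrEnum is in the standard library since Python 3.11


class JobState(StrEnum):  # hypothetical enum, not one defined in this repo
    new = "new"
    completed = "completed"


# StrEnum members are real strings, so they compare and serialize as plain str values.
assert JobState.new == "new"

# datetime.now(UTC) is equivalent to datetime.now(timezone.utc): an aware UTC timestamp.
print(datetime.now(UTC).isoformat())
```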

Signed-off-by: Charlie Doern <cdoern@redhat.com>
Charlie Doern authored on 2025-06-19 02:37:14 -04:00, committed by GitHub
parent db2cd9e8f3
commit d12f195f56
32 changed files with 63 additions and 104 deletions

@@ -11,7 +11,7 @@ import secrets
 import string
 import uuid
 from collections.abc import AsyncGenerator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 import httpx
@@ -242,7 +242,7 @@ class ChatAgent(ShieldRunnerMixin):
 in_progress_tool_call_step = await self.storage.get_in_progress_tool_call_step(
     request.session_id, request.turn_id
 )
-now = datetime.now(timezone.utc).isoformat()
+now = datetime.now(UTC).isoformat()
 tool_execution_step = ToolExecutionStep(
     step_id=(in_progress_tool_call_step.step_id if in_progress_tool_call_step else str(uuid.uuid4())),
     turn_id=request.turn_id,
@@ -267,7 +267,7 @@ class ChatAgent(ShieldRunnerMixin):
     start_time = last_turn.started_at
 else:
     messages.extend(request.messages)
-    start_time = datetime.now(timezone.utc).isoformat()
+    start_time = datetime.now(UTC).isoformat()
 input_messages = request.messages
 output_message = None
@@ -298,7 +298,7 @@ class ChatAgent(ShieldRunnerMixin):
     input_messages=input_messages,
     output_message=output_message,
     started_at=start_time,
-    completed_at=datetime.now(timezone.utc).isoformat(),
+    completed_at=datetime.now(UTC).isoformat(),
     steps=steps,
 )
 await self.storage.add_turn_to_session(request.session_id, turn)
@@ -389,7 +389,7 @@ class ChatAgent(ShieldRunnerMixin):
     return
 step_id = str(uuid.uuid4())
-shield_call_start_time = datetime.now(timezone.utc).isoformat()
+shield_call_start_time = datetime.now(UTC).isoformat()
 try:
     yield AgentTurnResponseStreamChunk(
         event=AgentTurnResponseEvent(
@@ -413,7 +413,7 @@ class ChatAgent(ShieldRunnerMixin):
             turn_id=turn_id,
             violation=e.violation,
             started_at=shield_call_start_time,
-            completed_at=datetime.now(timezone.utc).isoformat(),
+            completed_at=datetime.now(UTC).isoformat(),
         ),
     )
 )
@@ -436,7 +436,7 @@ class ChatAgent(ShieldRunnerMixin):
             turn_id=turn_id,
             violation=None,
             started_at=shield_call_start_time,
-            completed_at=datetime.now(timezone.utc).isoformat(),
+            completed_at=datetime.now(UTC).isoformat(),
         ),
     )
 )
@@ -491,7 +491,7 @@ class ChatAgent(ShieldRunnerMixin):
     client_tools[tool.name] = tool
 while True:
     step_id = str(uuid.uuid4())
-    inference_start_time = datetime.now(timezone.utc).isoformat()
+    inference_start_time = datetime.now(UTC).isoformat()
     yield AgentTurnResponseStreamChunk(
         event=AgentTurnResponseEvent(
             payload=AgentTurnResponseStepStartPayload(
@@ -603,7 +603,7 @@ class ChatAgent(ShieldRunnerMixin):
             turn_id=turn_id,
             model_response=copy.deepcopy(message),
             started_at=inference_start_time,
-            completed_at=datetime.now(timezone.utc).isoformat(),
+            completed_at=datetime.now(UTC).isoformat(),
         ),
     )
 )
@@ -681,7 +681,7 @@ class ChatAgent(ShieldRunnerMixin):
         "input": message.model_dump_json(),
     },
 ) as span:
-    tool_execution_start_time = datetime.now(timezone.utc).isoformat()
+    tool_execution_start_time = datetime.now(UTC).isoformat()
     tool_result = await self.execute_tool_call_maybe(
         session_id,
         tool_call,
@@ -710,7 +710,7 @@ class ChatAgent(ShieldRunnerMixin):
         )
     ],
     started_at=tool_execution_start_time,
-    completed_at=datetime.now(timezone.utc).isoformat(),
+    completed_at=datetime.now(UTC).isoformat(),
 )
 # Yield the step completion event
@@ -747,7 +747,7 @@ class ChatAgent(ShieldRunnerMixin):
         turn_id=turn_id,
         tool_calls=client_tool_calls,
         tool_responses=[],
-        started_at=datetime.now(timezone.utc).isoformat(),
+        started_at=datetime.now(UTC).isoformat(),
     ),
 )

@@ -7,7 +7,7 @@
 import logging
 import uuid
 from collections.abc import AsyncGenerator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from llama_stack.apis.agents import (
     Agent,
@@ -93,7 +93,7 @@ class MetaReferenceAgentsImpl(Agents):
     agent_config: AgentConfig,
 ) -> AgentCreateResponse:
     agent_id = str(uuid.uuid4())
-    created_at = datetime.now(timezone.utc)
+    created_at = datetime.now(UTC)
     agent_info = AgentInfo(
         **agent_config.model_dump(),

@@ -7,7 +7,7 @@
 import json
 import logging
 import uuid
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from llama_stack.apis.agents import AgentConfig, Session, ToolExecutionStep, Turn
 from llama_stack.distribution.access_control.access_control import AccessDeniedError, is_action_allowed
@@ -47,7 +47,7 @@ class AgentPersistence:
 session_info = AgentSessionInfo(
     session_id=session_id,
     session_name=name,
-    started_at=datetime.now(timezone.utc),
+    started_at=datetime.now(UTC),
     owner=user,
     turns=[],
     identifier=name, # should this be qualified in any way?

@@ -11,7 +11,7 @@ import multiprocessing
 import os
 import signal
 import sys
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from pathlib import Path
 from typing import Any
@@ -670,7 +670,7 @@ class HFFinetuningSingleDevice:
 # Create checkpoint
 checkpoint = Checkpoint(
     identifier=f"{model}-sft-{config.n_epochs}",
-    created_at=datetime.now(timezone.utc),
+    created_at=datetime.now(UTC),
     epoch=config.n_epochs,
     post_training_job_id=job_uuid,
     path=str(output_dir_path / "merged_model"),

@@ -7,7 +7,7 @@
 import logging
 import os
 import time
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from functools import partial
 from pathlib import Path
 from typing import Any
@@ -537,7 +537,7 @@ class LoraFinetuningSingleDevice:
 checkpoint_path = await self.save_checkpoint(epoch=curr_epoch)
 checkpoint = Checkpoint(
     identifier=f"{self.model_id}-sft-{curr_epoch}",
-    created_at=datetime.now(timezone.utc),
+    created_at=datetime.now(UTC),
     epoch=curr_epoch,
     post_training_job_id=self.job_uuid,
     path=checkpoint_path,

@@ -5,7 +5,7 @@
 # the root directory of this source tree.
 import json
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from opentelemetry.sdk.trace import ReadableSpan
 from opentelemetry.sdk.trace.export import SpanProcessor
@@ -34,7 +34,7 @@ class ConsoleSpanProcessor(SpanProcessor):
 if span.attributes and span.attributes.get("__autotraced__"):
     return
-timestamp = datetime.fromtimestamp(span.start_time / 1e9, tz=timezone.utc).strftime("%H:%M:%S.%f")[:-3]
+timestamp = datetime.fromtimestamp(span.start_time / 1e9, tz=UTC).strftime("%H:%M:%S.%f")[:-3]
 print(
     f"{COLORS['dim']}{timestamp}{COLORS['reset']} "
@@ -46,7 +46,7 @@ class ConsoleSpanProcessor(SpanProcessor):
 if span.attributes and span.attributes.get("__autotraced__"):
     return
-timestamp = datetime.fromtimestamp(span.end_time / 1e9, tz=timezone.utc).strftime("%H:%M:%S.%f")[:-3]
+timestamp = datetime.fromtimestamp(span.end_time / 1e9, tz=UTC).strftime("%H:%M:%S.%f")[:-3]
 span_context = (
     f"{COLORS['dim']}{timestamp}{COLORS['reset']} "
@@ -74,7 +74,7 @@ class ConsoleSpanProcessor(SpanProcessor):
 print(f" {COLORS['dim']}{key}: {str_value}{COLORS['reset']}")
 for event in span.events:
-    event_time = datetime.fromtimestamp(event.timestamp / 1e9, tz=timezone.utc).strftime("%H:%M:%S.%f")[:-3]
+    event_time = datetime.fromtimestamp(event.timestamp / 1e9, tz=UTC).strftime("%H:%M:%S.%f")[:-3]
     severity = event.attributes.get("severity", "info")
     message = event.attributes.get("message", event.name)

@@ -8,7 +8,7 @@ import json
 import os
 import sqlite3
 import threading
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from opentelemetry.sdk.trace import SpanProcessor
 from opentelemetry.trace import Span
@@ -125,8 +125,8 @@ class SQLiteSpanProcessor(SpanProcessor):
         trace_id,
         service_name,
         (span_id if span.attributes.get("__root_span__") == "true" else None),
-        datetime.fromtimestamp(span.start_time / 1e9, timezone.utc).isoformat(),
-        datetime.fromtimestamp(span.end_time / 1e9, timezone.utc).isoformat(),
+        datetime.fromtimestamp(span.start_time / 1e9, UTC).isoformat(),
+        datetime.fromtimestamp(span.end_time / 1e9, UTC).isoformat(),
     ),
 )
@@ -144,8 +144,8 @@ class SQLiteSpanProcessor(SpanProcessor):
 trace_id,
 parent_span_id,
 span.name,
-datetime.fromtimestamp(span.start_time / 1e9, timezone.utc).isoformat(),
-datetime.fromtimestamp(span.end_time / 1e9, timezone.utc).isoformat(),
+datetime.fromtimestamp(span.start_time / 1e9, UTC).isoformat(),
+datetime.fromtimestamp(span.end_time / 1e9, UTC).isoformat(),
 json.dumps(dict(span.attributes)),
 span.status.status_code.name,
 span.kind.name,
@@ -162,7 +162,7 @@ class SQLiteSpanProcessor(SpanProcessor):
 (
     span_id,
     event.name,
-    datetime.fromtimestamp(event.timestamp / 1e9, timezone.utc).isoformat(),
+    datetime.fromtimestamp(event.timestamp / 1e9, UTC).isoformat(),
     json.dumps(dict(event.attributes)),
 ),
 )

@@ -87,9 +87,7 @@ class RefreshableBotoSession:
 "access_key": session_credentials.access_key,
 "secret_key": session_credentials.secret_key,
 "token": session_credentials.token,
-"expiry_time": datetime.datetime.fromtimestamp(
-    time() + self.session_ttl, datetime.timezone.utc
-).isoformat(),
+"expiry_time": datetime.datetime.fromtimestamp(time() + self.session_ttl, datetime.UTC).isoformat(),
 }
 return credentials

@@ -5,7 +5,7 @@
 # the root directory of this source tree.
 from collections.abc import AsyncIterator
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import Any
 from llama_stack.apis.inference import (
@@ -122,7 +122,7 @@ async def stream_and_store_openai_completion(
 final_response = OpenAIChatCompletion(
     id=id,
     choices=assembled_choices,
-    created=created or int(datetime.now(timezone.utc).timestamp()),
+    created=created or int(datetime.now(UTC).timestamp()),
     model=model,
     object="chat.completion",
 )

@@ -9,7 +9,7 @@ import asyncio
 import functools
 import threading
 from collections.abc import Callable, Coroutine, Iterable
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from enum import Enum
 from typing import Any, TypeAlias
@@ -61,7 +61,7 @@ class Job:
 self._handler = handler
 self._artifacts: list[JobArtifact] = []
 self._logs: list[LogMessage] = []
-self._state_transitions: list[tuple[datetime, JobStatus]] = [(datetime.now(timezone.utc), JobStatus.new)]
+self._state_transitions: list[tuple[datetime, JobStatus]] = [(datetime.now(UTC), JobStatus.new)]
 @property
 def handler(self) -> JobHandler:
@@ -77,7 +77,7 @@ class Job:
 raise ValueError(f"Job is already in a completed state ({self.status})")
 if self.status == status:
     return
-self._state_transitions.append((datetime.now(timezone.utc), status))
+self._state_transitions.append((datetime.now(UTC), status))
 @property
 def artifacts(self) -> list[JobArtifact]:
@@ -215,7 +215,7 @@ class Scheduler:
 self._backend = _get_backend_impl(backend)
 def _on_log_message_cb(self, job: Job, message: str) -> None:
-    msg = (datetime.now(timezone.utc), message)
+    msg = (datetime.now(UTC), message)
     # At least for the time being, until there's a better way to expose
     # logs to users, log messages on console
     logger.info(f"Job {job.id}: {message}")

@@ -11,7 +11,7 @@ import queue
 import random
 import threading
 from collections.abc import Callable
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from functools import wraps
 from typing import Any
@@ -121,7 +121,7 @@ class TraceContext:
     span_id=generate_span_id(),
     trace_id=self.trace_id,
     name=name,
-    start_time=datetime.now(timezone.utc),
+    start_time=datetime.now(UTC),
     parent_span_id=current_span.span_id if current_span else None,
     attributes=attributes,
 )
@@ -239,7 +239,7 @@ class TelemetryHandler(logging.Handler):
 UnstructuredLogEvent(
     trace_id=span.trace_id,
     span_id=span.span_id,
-    timestamp=datetime.now(timezone.utc),
+    timestamp=datetime.now(UTC),
     message=self.format(record),
     severity=severity(record.levelname),
 )