Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-04 10:10:36 +00:00
refactor: rename trace_protocol marker to mark_as_traced
Rename the marker decorator in apis/common/tracing.py from trace_protocol to mark_as_traced to disambiguate it from the actual tracing implementation decorator in core/telemetry/trace_protocol.py.

Changes:
- Rename decorator: trace_protocol -> mark_as_traced
- Rename attribute: __trace_protocol__ -> __marked_for_tracing__
- Update all API protocol files to use the new decorator name
- Update router logic to check for the new attribute name

This makes it clear that the marker decorator is metadata-only and doesn't perform actual tracing, while the core decorator does the implementation.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
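For context, a minimal Python sketch of how such a metadata-only marker can work: the decorator only stamps the class, and whatever wires up telemetry checks for the flag before applying the real wrapper from core/telemetry/trace_protocol.py. The names mark_as_traced and __marked_for_tracing__ come from this commit; the function bodies, is_marked_for_tracing, and ExampleAPI are illustrative assumptions, not the repository's actual code.

# Illustrative sketch only; the real mark_as_traced lives in
# apis/common/tracing.py and may differ in detail.
from typing import Protocol, TypeVar, runtime_checkable

T = TypeVar("T", bound=type)


def mark_as_traced(cls: T) -> T:
    """Tag a protocol class as traceable; performs no tracing itself."""
    cls.__marked_for_tracing__ = True  # metadata flag only, no wrapping
    return cls


def is_marked_for_tracing(protocol_cls: type) -> bool:
    """Hypothetical router-side check performed before applying the real
    tracing decorator from core/telemetry/trace_protocol.py."""
    return getattr(protocol_cls, "__marked_for_tracing__", False)


@runtime_checkable
@mark_as_traced
class ExampleAPI(Protocol):
    """Hypothetical API protocol marked for tracing."""

    def ping(self) -> str: ...


# The flag is now visible to the router without any tracing having happened.
assert is_marked_for_tracing(ExampleAPI)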
This commit is contained in:
parent cd17c62ec4
commit 29f93a6391

11 changed files with 28 additions and 26 deletions
@@ -20,7 +20,7 @@ from llama_stack.apis.agents.openai_responses import (
     OpenAIResponseOutputMessageMCPListTools,
     OpenAIResponseOutputMessageWebSearchToolCall,
 )
-from llama_stack.apis.common.tracing import trace_protocol
+from llama_stack.apis.common.tracing import mark_as_traced
 from llama_stack.apis.version import LLAMA_STACK_API_V1
 from llama_stack.schema_utils import json_schema_type, register_schema, webmethod

@@ -183,7 +183,7 @@ class ConversationItemDeletedResource(BaseModel):


 @runtime_checkable
-@trace_protocol
+@mark_as_traced
 class Conversations(Protocol):
     """Conversations