mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-05 02:17:31 +00:00
refactor: rename trace_protocol marker to mark_as_traced
Rename the marker decorator in apis/common/tracing.py from trace_protocol to mark_as_traced to disambiguate it from the actual tracing implementation decorator in core/telemetry/trace_protocol.py.

Changes:
- Rename decorator: trace_protocol -> mark_as_traced
- Rename attribute: __trace_protocol__ -> __marked_for_tracing__
- Update all API protocol files to use the new decorator name
- Update router logic to check for the new attribute name

This makes it clear that the marker decorator is metadata-only and doesn't perform actual tracing, while the core decorator does the implementation.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
parent
cd17c62ec4
commit
29f93a6391
11 changed files with 28 additions and 26 deletions
|
|
@@ -20,7 +20,7 @@ from typing_extensions import TypedDict
|
|||
|
||||
from llama_stack.apis.common.content_types import ContentDelta, InterleavedContent
|
||||
from llama_stack.apis.common.responses import MetricResponseMixin, Order
|
||||
from llama_stack.apis.common.tracing import trace_protocol
|
||||
from llama_stack.apis.common.tracing import mark_as_traced
|
||||
from llama_stack.apis.models import Model
|
||||
from llama_stack.apis.version import LLAMA_STACK_API_V1, LLAMA_STACK_API_V1ALPHA
|
||||
from llama_stack.models.llama.datatypes import (
|
||||
|
|
@@ -1159,7 +1159,7 @@ class OpenAIEmbeddingsRequestWithExtraBody(BaseModel, extra="allow"):
|
|||
|
||||
|
||||
@runtime_checkable
|
||||
@trace_protocol
|
||||
@mark_as_traced
|
||||
class InferenceProvider(Protocol):
|
||||
"""
|
||||
This protocol defines the interface that should be implemented by all inference providers.
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue