mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-04 18:13:44 +00:00
fix: rename decorator to telemetry_traceable
The new name is more descriptive and less redundant.

Signed-off-by: Charlie Doern <cdoern@redhat.com>
This commit is contained in:
parent
d1adc5a6eb
commit
89b23ec21d
10 changed files with 21 additions and 21 deletions
|
|
@ -20,7 +20,7 @@ from typing_extensions import TypedDict
|
|||
|
||||
from llama_stack.apis.common.content_types import ContentDelta, InterleavedContent
|
||||
from llama_stack.apis.common.responses import MetricResponseMixin, Order
|
||||
from llama_stack.apis.common.tracing import mark_as_traced
|
||||
from llama_stack.apis.common.tracing import telemetry_traceable
|
||||
from llama_stack.apis.models import Model
|
||||
from llama_stack.apis.version import LLAMA_STACK_API_V1, LLAMA_STACK_API_V1ALPHA
|
||||
from llama_stack.models.llama.datatypes import (
|
||||
|
|
@ -1159,7 +1159,7 @@ class OpenAIEmbeddingsRequestWithExtraBody(BaseModel, extra="allow"):
|
|||
|
||||
|
||||
@runtime_checkable
|
||||
@mark_as_traced
|
||||
@telemetry_traceable
|
||||
class InferenceProvider(Protocol):
|
||||
"""
|
||||
This protocol defines the interface that should be implemented by all inference providers.
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue