Mirror of https://github.com/meta-llama/llama-stack.git (synced 2026-01-02 10:24:31 +00:00)
add a warning for when there are no spans present
parent 1ddc026355
commit 8fee85a178
1 changed file with 14 additions and 0 deletions
@@ -162,7 +162,21 @@ class InferenceRouter(Inference):
     def _construct_metrics(
         self, prompt_tokens: int, completion_tokens: int, total_tokens: int, model: Model
     ) -> List[MetricEvent]:
+        """Constructs a list of MetricEvent objects containing token usage metrics.
+
+        Args:
+            prompt_tokens: Number of tokens in the prompt
+            completion_tokens: Number of tokens in the completion
+            total_tokens: Total number of tokens used
+            model: Model object containing model_id and provider_id
+
+        Returns:
+            List of MetricEvent objects with token usage metrics
+        """
         span = get_current_span()
+        if span is None:
+            logger.warning("No span found for token usage metrics")
+            return []
         metrics = [
             ("prompt_tokens", prompt_tokens),
             ("completion_tokens", completion_tokens),
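For context, here is a minimal, self-contained sketch of the pattern this commit implements: look up the current tracing span before emitting token-usage metrics, and if no span is active, log a warning and return an empty list rather than failing. The `MetricEvent` and `Span` dataclasses and the `get_current_span` stub below are simplified stand-ins for illustration only, not llama-stack's actual telemetry API, and the continuation past the truncated hunk is an assumption based on the docstring.

import logging
import time
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

logger = logging.getLogger(__name__)


# Simplified stand-ins for llama-stack's telemetry types (assumptions, not the real API).
@dataclass
class MetricEvent:
    trace_id: str
    span_id: str
    metric: str
    value: int
    timestamp: float
    unit: str
    attributes: Dict[str, Any] = field(default_factory=dict)


@dataclass
class Span:
    trace_id: str
    span_id: str


def get_current_span() -> Optional[Span]:
    """Stand-in for the tracing helper; returns None when no span is active."""
    return None  # no tracing context in this sketch


def construct_metrics(
    prompt_tokens: int, completion_tokens: int, total_tokens: int, model_id: str
) -> List[MetricEvent]:
    span = get_current_span()
    if span is None:
        # The behavior this commit adds: warn and skip metrics instead of crashing.
        logger.warning("No span found for token usage metrics")
        return []
    # Attach the span's identifiers to each token counter (hypothetical continuation).
    return [
        MetricEvent(
            trace_id=span.trace_id,
            span_id=span.span_id,
            metric=name,
            value=value,
            timestamp=time.time(),
            unit="tokens",
            attributes={"model_id": model_id},
        )
        for name, value in [
            ("prompt_tokens", prompt_tokens),
            ("completion_tokens", completion_tokens),
            ("total_tokens", total_tokens),
        ]
    ]


print(construct_metrics(10, 5, 15, "llama-3"))  # -> [] plus a logged warning

Returning an empty list instead of raising means inference keeps working even when telemetry is not configured; the warning makes the missing span visible without turning an observability gap into a request failure.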