Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-03 09:53:45 +00:00)
fix: failure in responses during construct metrics (#4157)
# What does this PR do?
Without this fix, the following error appears in the server logs:
```
RuntimeError: OpenAI response failed: InferenceRouter._construct_metrics() got an unexpected keyword argument
'model_id'
```
It seems the method signature was updated at some point, but this call site was not updated to match; the sketch below illustrates the mismatch.
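The mechanism is a plain keyword-argument mismatch: the helper's parameter was renamed, but the caller still passed the old keyword name. A minimal, self-contained sketch of that failure mode (names are assumptions for illustration, not the actual llama-stack code) looks like this:

```python
# Hypothetical sketch: a metrics helper whose parameter was renamed from
# `model_id` to `fully_qualified_model_id` (mirroring the rename in this PR).
def construct_metrics(*, prompt_tokens: int, completion_tokens: int,
                      total_tokens: int, fully_qualified_model_id: str,
                      provider_id: str) -> dict:
    """New signature: the model is identified by `fully_qualified_model_id`."""
    return {
        "prompt_tokens": prompt_tokens,
        "completion_tokens": completion_tokens,
        "total_tokens": total_tokens,
        "model_id": fully_qualified_model_id,
        "provider_id": provider_id,
    }

# A stale call site that still uses the old keyword raises:
#   TypeError: construct_metrics() got an unexpected keyword argument 'model_id'
# construct_metrics(prompt_tokens=10, completion_tokens=20, total_tokens=30,
#                   model_id="provider/llama3", provider_id="provider")

# Updated call site matching the new signature works as expected:
metrics = construct_metrics(
    prompt_tokens=10,
    completion_tokens=20,
    total_tokens=30,
    fully_qualified_model_id="provider/llama3",
    provider_id="provider",
)
```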
## Test Plan
CI, plus testing with Sabre (agent framework integration).
parent a82b79ce57
commit ba744d791a
1 changed file with 1 addition and 1 deletion
```diff
@@ -417,7 +417,7 @@ class InferenceRouter(Inference):
                         prompt_tokens=chunk.usage.prompt_tokens,
                         completion_tokens=chunk.usage.completion_tokens,
                         total_tokens=chunk.usage.total_tokens,
-                        model_id=fully_qualified_model_id,
+                        fully_qualified_model_id=fully_qualified_model_id,
                         provider_id=provider_id,
                     )
                     for metric in metrics:
```