Mirror of https://github.com/BerriAI/litellm.git
* fix TypeError: 'CompletionUsage' object is not subscriptable (#5441)
* test(test_team_logging.py): mark flaky test

Co-authored-by: yafei lee <yafei@dao42.com>
parent e9591bdfe9
commit fe2a3c02e5

2 changed files with 5 additions and 4 deletions
@@ -300,8 +300,8 @@ class LangFuseLogger:
                 prompt=input,
                 completion=output,
                 usage={
-                    "prompt_tokens": response_obj["usage"]["prompt_tokens"],
-                    "completion_tokens": response_obj["usage"]["completion_tokens"],
+                    "prompt_tokens": response_obj.usage.prompt_tokens,
+                    "completion_tokens": response_obj.usage.completion_tokens,
                 },
                 metadata=metadata,
             )

@@ -526,8 +526,8 @@ class LangFuseLogger:
         if response_obj is not None and response_obj.get("id", None) is not None:
             generation_id = litellm.utils.get_logging_id(start_time, response_obj)
             usage = {
-                "prompt_tokens": response_obj["usage"]["prompt_tokens"],
-                "completion_tokens": response_obj["usage"]["completion_tokens"],
+                "prompt_tokens": response_obj.usage.prompt_tokens,
+                "completion_tokens": response_obj.usage.completion_tokens,
                 "total_cost": cost if supports_costs else None,
             }
             generation_name = clean_metadata.pop("generation_name", None)
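Why the change above is needed: per the commit title, the `usage` field on the response object is a Pydantic `CompletionUsage` object rather than a plain dict, so subscripting it raises a TypeError, while attribute access works. Below is a minimal sketch of the failure and the fix, using a stand-in Pydantic model instead of litellm's real response types (the `Usage` class and the field values are illustrative).

```python
# Minimal reproduction sketch of the bug fixed above. `Usage` is a stand-in
# for the openai SDK's CompletionUsage pydantic model; field names match the diff.
from pydantic import BaseModel


class Usage(BaseModel):
    prompt_tokens: int
    completion_tokens: int
    total_tokens: int


usage = Usage(prompt_tokens=10, completion_tokens=5, total_tokens=15)

# Attribute access is what the patched logger now uses:
print(usage.prompt_tokens)      # 10
print(usage.completion_tokens)  # 5

# Dict-style access on a pydantic model is not supported and raises the
# error from the commit title:
try:
    usage["prompt_tokens"]
except TypeError as exc:
    print(exc)  # 'Usage' object is not subscriptable
```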
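On the second bullet of the commit message (marking a flaky test): the test-file hunk is not shown in this diff, but one common way to retry an intermittently failing test is the pytest-rerunfailures marker. The sketch below assumes that plugin is installed; the test name and body are placeholders, not the real team-logging check.

```python
# Hypothetical sketch of marking a flaky test, assuming pytest-rerunfailures.
# The actual marker used in test_team_logging.py is not shown in this diff.
import pytest


@pytest.mark.flaky(reruns=3, reruns_delay=2)  # re-run up to 3 times, 2s apart
def test_team_logging_placeholder():
    # Placeholder assertion standing in for the real team-logging assertions.
    assert True
```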