Mirror of https://github.com/BerriAI/litellm.git, synced 2025-04-27 19:54:13 +00:00.
test(tests/litellm): add unit test for transform usage function
This commit is contained in:
parent
b51495f2da
commit
09eb748e13
2 changed files with 36 additions and 9 deletions
|
@ -0,0 +1,36 @@
|
||||||
|
# Standard-library imports.
import json
import os
import sys
from unittest.mock import MagicMock, patch

# Third-party imports.
import pytest
from fastapi.testclient import TestClient

# Make the repository root importable before pulling in litellm modules.
sys.path.insert(0, os.path.abspath("../../../../.."))  # Adds the parent directory to the system path

# Project imports (require the sys.path tweak above).
from litellm.llms.bedrock.chat.converse_transformation import AmazonConverseConfig
from litellm.types.llms.bedrock import ConverseTokenUsageBlock
|
||||||
|
|
||||||
|
|
||||||
|
def test_transform_usage():
    """Verify that ``AmazonConverseConfig._transform_usage`` maps a Bedrock
    Converse token-usage block onto the OpenAI-style usage object.

    Checks that cache-read tokens are folded into ``prompt_tokens`` (12 input
    + 10 cache-read = 22) and that the cached-token counts are also surfaced
    on ``prompt_tokens_details`` and the private bookkeeping fields.
    """
    converse_usage = ConverseTokenUsageBlock(
        cacheReadInputTokenCount=0,
        cacheReadInputTokens=10,
        cacheCreationInputTokenCount=0,
        cacheCreationInputTokens=0,
        inputTokens=12,
        outputTokens=56,
        totalTokens=78,
    )

    transformed = AmazonConverseConfig()._transform_usage(converse_usage)

    # prompt_tokens = inputTokens (12) + cacheReadInputTokens (10).
    assert transformed.prompt_tokens == 22
    assert transformed.completion_tokens == 56
    assert transformed.total_tokens == 78
    # Cached-token accounting appears both in the details block and on the
    # private cache counters.
    assert transformed.prompt_tokens_details.cached_tokens == 10
    assert transformed._cache_creation_input_tokens == 0
    assert transformed._cache_read_input_tokens == 10
|
|
@ -2948,12 +2948,3 @@ async def test_bedrock_stream_thinking_content_openwebui():
|
||||||
assert (
|
assert (
|
||||||
len(response_content) > 0
|
len(response_content) > 0
|
||||||
), "There should be non-empty content after thinking tags"
|
), "There should be non-empty content after thinking tags"
|
||||||
|
|
||||||
|
|
||||||
def test_bedrock_usage_block():
    """Smoke test: a Bedrock completion call reports non-zero token usage.

    NOTE(review): this makes a live network call to Bedrock, so it needs AWS
    credentials and access to the named model — confirm it is only run in an
    environment where that is available.
    """
    litellm._turn_on_debug()
    prompt_messages = [{"role": "user", "content": "Hello who is this?"}]
    response = completion(
        model="bedrock/us.anthropic.claude-3-7-sonnet-20250219-v1:0",
        messages=prompt_messages,
    )
    # Any successful round-trip must account for at least one token.
    assert response.usage.total_tokens > 0
|
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue