test: update tests

Krrish Dholakia 2024-11-14 18:56:59 +05:30
parent 74c2177e5e
commit dc4235e7f1

@@ -172,6 +172,8 @@ def test_stream_chunk_builder_litellm_usage_chunks():
     """
     Checks if stream_chunk_builder is able to correctly rebuild with given metadata from streaming chunks
     """
+    from litellm.types.utils import Usage
+
     messages = [
         {"role": "user", "content": "Tell me the funniest joke you know."},
         {
@@ -183,16 +185,19 @@ def test_stream_chunk_builder_litellm_usage_chunks():
         {"role": "user", "content": "\nI am waiting...\n\n...\n"},
     ]
     # make a regular gemini call
-    response = completion(
-        model="gemini/gemini-1.5-flash",
-        messages=messages,
-    )
-    usage: litellm.Usage = response.usage
+    usage: litellm.Usage = Usage(
+        completion_tokens=64,
+        prompt_tokens=55,
+        total_tokens=119,
+        completion_tokens_details=None,
+        prompt_tokens_details=None,
+    )
     gemini_pt = usage.prompt_tokens
     # make a streaming gemini call
-    response = completion(
-        model="gemini/gemini-1.5-flash",
-        messages=messages,
+    try:
+        response = completion(
+            model="gemini/gemini-1.5-flash",
+            messages=messages,
@@ -200,6 +205,8 @@ def test_stream_chunk_builder_litellm_usage_chunks():
-        complete_response=True,
-        stream_options={"include_usage": True},
-    )
+            complete_response=True,
+            stream_options={"include_usage": True},
+        )
+    except litellm.ServiceUnavailableError as e:
+        pytest.skip(f"ServiceUnavailableError - {str(e)}")
     usage: litellm.Usage = response.usage
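
Read as a whole, the updated test looks roughly like the sketch below. The diff replaces a live non-streaming Gemini call with a hardcoded Usage baseline, then treats upstream outages on the streaming call as a skip rather than a failure. The stream=True argument and the closing prompt-token assertion fall outside the hunks shown above, so they are assumptions; everything else mirrors the diff.

    # Sketch of the test after this commit, using only the completion()/Usage/
    # pytest.skip calls visible in the diff. stream=True and the final
    # assertion are assumptions, not part of the hunks shown.
    import litellm
    import pytest
    from litellm import completion


    def test_stream_chunk_builder_litellm_usage_chunks():
        """
        Checks if stream_chunk_builder is able to correctly rebuild with given metadata from streaming chunks
        """
        from litellm.types.utils import Usage

        messages = [
            {"role": "user", "content": "Tell me the funniest joke you know."},
            {"role": "user", "content": "\nI am waiting...\n\n...\n"},
        ]
        # hardcoded baseline instead of a live non-streaming gemini call
        usage: litellm.Usage = Usage(
            completion_tokens=64,
            prompt_tokens=55,
            total_tokens=119,
            completion_tokens_details=None,
            prompt_tokens_details=None,
        )
        gemini_pt = usage.prompt_tokens

        # streaming gemini call; skip instead of failing when the service is down
        try:
            response = completion(
                model="gemini/gemini-1.5-flash",
                messages=messages,
                stream=True,  # assumption: elided between the hunks above
                complete_response=True,
                stream_options={"include_usage": True},
            )
        except litellm.ServiceUnavailableError as e:
            pytest.skip(f"ServiceUnavailableError - {str(e)}")

        usage: litellm.Usage = response.usage
        # assumption: the usage rebuilt from streaming chunks should match the baseline
        assert usage.prompt_tokens == gemini_pt

Pinning the baseline Usage removes one network call from the test, and the try/except turns transient ServiceUnavailableError responses from the provider into skips, so only genuine usage-reconstruction bugs fail the test.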