fix(utils.py): handle gemini chunk no parts error

Fixes https://github.com/BerriAI/litellm/issues/3468
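For context, the crash happens when a streamed Gemini chunk arrives with an empty parts list: the old code guarded the content lookup but not the finish_reason lookup, so chunk.parts[0] could fail mid-stream with an IndexError. A minimal sketch of the guard this commit adds (simplified, with a hypothetical helper name; the real change is in the CustomStreamWrapper hunk below):

# Minimal sketch of the guard added by this commit (simplified; the helper
# name is hypothetical and not part of litellm).
def handle_gemini_chunk(chunk, completion_obj):
    finish_reason = None
    # A streamed chunk may carry no parts at all, so every chunk.parts[0]
    # access needs a length check instead of assuming the list is non-empty.
    if len(chunk.parts) > 0:
        completion_obj["content"] = chunk.parts[0].text
    if len(chunk.parts) > 0 and hasattr(chunk.parts[0], "finish_reason"):
        finish_reason = chunk.parts[0].finish_reason.name
    return finish_reason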
Krrish Dholakia 2024-05-06 10:59:36 -07:00
parent e8d3dd475a
commit 4b5cf26c1b
3 changed files with 48 additions and 47 deletions

View file

@@ -5,50 +5,21 @@ plugins: timeout-2.2.0, asyncio-0.23.2, anyio-3.7.1, xdist-3.3.1
asyncio: mode=Mode.STRICT
collected 1 item
test_completion.py F [100%]
test_completion.py Chunks sorted
token_counter messages received: [{'role': 'user', 'content': 'what is the capital of congo?'}]
Token Counter - using generic token counter, for model=gemini-1.5-pro-latest
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
.Token Counter - using generic token counter, for model=gemini-1.5-pro-latest
LiteLLM: Utils - Counting tokens for OpenAI model=gpt-3.5-turbo
Looking up model=gemini/gemini-1.5-pro-latest in model_cost_map
Success: model=gemini/gemini-1.5-pro-latest in model_cost_map
prompt_tokens=15; completion_tokens=1
Returned custom cost for model=gemini/gemini-1.5-pro-latest - prompt_tokens_cost_usd_dollar: 0, completion_tokens_cost_usd_dollar: 0
final cost: 0; prompt_tokens_cost_usd_dollar: 0; completion_tokens_cost_usd_dollar: 0
[100%]
=================================== FAILURES ===================================
______________________ test_completion_anthropic_hanging _______________________
    def test_completion_anthropic_hanging():
        litellm.set_verbose = True
        litellm.modify_params = True
        messages = [
            {
                "role": "user",
                "content": "What's the capital of fictional country Ubabababababaaba? Use your tools.",
            },
            {
                "role": "assistant",
                "function_call": {
                    "name": "get_capital",
                    "arguments": '{"country": "Ubabababababaaba"}',
                },
            },
            {"role": "function", "name": "get_capital", "content": "Kokoko"},
        ]
        converted_messages = anthropic_messages_pt(messages)
        print(f"converted_messages: {converted_messages}")
        ## ENSURE USER / ASSISTANT ALTERNATING
        for i, msg in enumerate(converted_messages):
            if i < len(converted_messages) - 1:
>               assert msg["role"] != converted_messages[i + 1]["role"]
E               AssertionError: assert 'user' != 'user'
test_completion.py:2406: AssertionError
---------------------------- Captured stdout setup -----------------------------
<module 'litellm' from '/Users/krrishdholakia/Documents/litellm/litellm/__init__.py'>
pytest fixture - resetting callbacks
----------------------------- Captured stdout call -----------------------------
message: {'role': 'user', 'content': "What's the capital of fictional country Ubabababababaaba? Use your tools."}
message: {'role': 'function', 'name': 'get_capital', 'content': 'Kokoko'}
converted_messages: [{'role': 'user', 'content': [{'type': 'text', 'text': "What's the capital of fictional country Ubabababababaaba? Use your tools."}]}, {'role': 'user', 'content': [{'type': 'tool_result', 'tool_use_id': '10e9f4d4-bdc9-4514-8b7a-c10bc555d67c', 'content': 'Kokoko'}]}]
=============================== warnings summary ===============================
../../../../../../opt/homebrew/lib/python3.11/site-packages/pydantic/_internal/_config.py:284: 23 warnings
../../../../../../opt/homebrew/lib/python3.11/site-packages/pydantic/_internal/_config.py:284: 24 warnings
/opt/homebrew/lib/python3.11/site-packages/pydantic/_internal/_config.py:284: PydanticDeprecatedSince20: Support for class-based `config` is deprecated, use ConfigDict instead. Deprecated in Pydantic V2.0 to be removed in V3.0. See Pydantic V2 Migration Guide at https://errors.pydantic.dev/2.7/migration/
warnings.warn(DEPRECATION_MESSAGE, DeprecationWarning)
@@ -121,6 +92,7 @@ converted_messages: [{'role': 'user', 'content': [{'type': 'text', 'text': "What
../../../../../../opt/homebrew/lib/python3.11/site-packages/pkg_resources/__init__.py:2349
../../../../../../opt/homebrew/lib/python3.11/site-packages/pkg_resources/__init__.py:2349
../../../../../../opt/homebrew/lib/python3.11/site-packages/pkg_resources/__init__.py:2349
test_completion.py::test_gemini_completion_call_error
/opt/homebrew/lib/python3.11/site-packages/pkg_resources/__init__.py:2349: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google')`.
Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
declare_namespace(parent)
@@ -151,7 +123,10 @@ converted_messages: [{'role': 'user', 'content': [{'type': 'text', 'text': "What
Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
declare_namespace(pkg)
test_completion.py::test_gemini_completion_call_error
/opt/homebrew/lib/python3.11/site-packages/google/rpc/__init__.py:20: DeprecationWarning: Deprecated call to `pkg_resources.declare_namespace('google.rpc')`.
Implementing implicit namespace packages (as specified in PEP 420) is preferred to `pkg_resources.declare_namespace`. See https://setuptools.pypa.io/en/latest/references/keywords.html#keyword-namespace-packages
pkg_resources.declare_namespace(__name__)
-- Docs: https://docs.pytest.org/en/stable/how-to/capture-warnings.html
=========================== short test summary info ============================
FAILED test_completion.py::test_completion_anthropic_hanging - AssertionError...
======================== 1 failed, 60 warnings in 0.15s ========================
======================== 1 passed, 63 warnings in 1.48s ========================

View file

@@ -299,6 +299,24 @@ async def test_anthropic_no_content_error():
         pytest.fail(f"An unexpected error occurred - {str(e)}")
+def test_gemini_completion_call_error():
+    try:
+        print("test completion + streaming")
+        litellm.set_verbose = True
+        messages = [{"role": "user", "content": "what is the capital of congo?"}]
+        response = completion(
+            model="gemini/gemini-1.5-pro-latest",
+            messages=messages,
+            stream=True,
+            max_tokens=10,
+        )
+        print(f"response: {response}")
+        for chunk in response:
+            print(chunk)
+    except Exception as e:
+        pytest.fail(f"error occurred: {str(e)}")
 def test_completion_cohere_command_r_plus_function_call():
     litellm.set_verbose = True
     tools = [

View file

@@ -8499,7 +8499,13 @@ def exception_type(
                         message=f"GeminiException - {original_exception.message}",
                         llm_provider="palm",
                         model=model,
-                        request=original_exception.request,
+                        request=httpx.Response(
+                            status_code=429,
+                            request=httpx.Request(
+                                method="POST",
+                                url=" https://cloud.google.com/vertex-ai/",
+                            ),
+                        ),
                     )
                 if hasattr(original_exception, "status_code"):
                     if original_exception.status_code == 400:
@@ -10289,7 +10295,9 @@ class CustomStreamWrapper:
                 try:
                     if len(chunk.parts) > 0:
                         completion_obj["content"] = chunk.parts[0].text
-                    if hasattr(chunk.parts[0], "finish_reason"):
+                    if len(chunk.parts) > 0 and hasattr(
+                        chunk.parts[0], "finish_reason"
+                    ):
                         self.received_finish_reason = chunk.parts[
                             0
                         ].finish_reason.name
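
As a usage note, the new regression test mirrors a plain streaming call; a minimal reproduction sketch (assumes a valid Gemini API key is configured in the environment, e.g. GEMINI_API_KEY):

import litellm

# Streaming Gemini call that could previously crash mid-stream when a
# chunk arrived with an empty parts list.
response = litellm.completion(
    model="gemini/gemini-1.5-pro-latest",
    messages=[{"role": "user", "content": "what is the capital of congo?"}],
    stream=True,
    max_tokens=10,
)
for chunk in response:
    print(chunk)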