diff --git a/litellm/tests/test_streaming.py b/litellm/tests/test_streaming.py
index 329769782..d7af02a42 100644
--- a/litellm/tests/test_streaming.py
+++ b/litellm/tests/test_streaming.py
@@ -591,6 +591,64 @@ def test_completion_mistral_api_stream():
         pytest.fail(f"Error occurred: {e}")
 
 
+def test_completion_mistral_api_mistral_large_function_call_with_streaming():
+    litellm.set_verbose = True
+    tools = [
+        {
+            "type": "function",
+            "function": {
+                "name": "get_current_weather",
+                "description": "Get the current weather in a given location",
+                "parameters": {
+                    "type": "object",
+                    "properties": {
+                        "location": {
+                            "type": "string",
+                            "description": "The city and state, e.g. San Francisco, CA",
+                        },
+                        "unit": {"type": "string", "enum": ["celsius", "fahrenheit"]},
+                    },
+                    "required": ["location"],
+                },
+            },
+        }
+    ]
+    messages = [
+        {
+            "role": "user",
+            "content": "What's the weather like in Boston today in fahrenheit?",
+        }
+    ]
+    try:
+        # test without max tokens
+        response = completion(
+            model="mistral/mistral-large-latest",
+            messages=messages,
+            tools=tools,
+            tool_choice="auto",
+            stream=True,
+        )
+        idx = 0
+        for chunk in response:
+            print(f"chunk in response: {chunk}")
+            if idx == 0:
+                assert (
+                    chunk.choices[0].delta.tool_calls[0].function.arguments is not None
+                )
+                assert isinstance(
+                    chunk.choices[0].delta.tool_calls[0].function.arguments, str
+                )
+                validate_first_streaming_function_calling_chunk(chunk=chunk)
+            elif idx == 1 and chunk.choices[0].finish_reason is None:
+                validate_second_streaming_function_calling_chunk(chunk=chunk)
+            elif chunk.choices[0].finish_reason is not None:  # last chunk
+                validate_final_streaming_function_calling_chunk(chunk=chunk)
+            idx += 1
+        # raise Exception("it worked!")
+    except Exception as e:
+        pytest.fail(f"Error occurred: {e}")
+
+
 # test_completion_mistral_api_stream()
 
 