fix: Ensure that tool calls with no arguments get handled correctly (#3560)

# What does this PR do?
When a model decides to use an MCP tool call that requires no arguments,
it sets the `arguments` field to `None`. This causes the user to see a
`400 bad requst error` due to validation errors down the stack because
this field gets removed when being parsed by an OpenAI-compatible
inference provider like vLLM.
This PR ensures that, as soon as the tool call args are accumulated
while streaming, we check to ensure no tool call function arguments are
set to `None` — if they are, we replace them with `"{}"`.

<!-- If resolving an issue, uncomment and update the line below -->
Closes #3456

## Test Plan
Added new unit test to verify that any tool calls with function
arguments set to `None` get handled correctly

---------

Signed-off-by: Jaideep Rao <jrao@redhat.com>
Co-authored-by: github-actions[bot] <github-actions[bot]@users.noreply.github.com>
Co-authored-by: Ashwin Bharambe <ashwin.bharambe@gmail.com>
This commit is contained in:
Jaideep Rao 2025-10-01 08:36:57 -04:00 committed by GitHub
parent 42414a1a1b
commit ca47d90926
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
51 changed files with 11061 additions and 10200 deletions

View file

@ -22,7 +22,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:48.840898Z",
"created_at": "2025-10-01T01:33:52.93635761Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -40,7 +40,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:48.883619Z",
"created_at": "2025-10-01T01:33:53.133195005Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -58,7 +58,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:48.92504Z",
"created_at": "2025-10-01T01:33:53.332277092Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -76,7 +76,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:48.966274Z",
"created_at": "2025-10-01T01:33:53.529012616Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -94,7 +94,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.007525Z",
"created_at": "2025-10-01T01:33:53.724651797Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -112,7 +112,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.049125Z",
"created_at": "2025-10-01T01:33:53.923248219Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -130,7 +130,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.090893Z",
"created_at": "2025-10-01T01:33:54.117881107Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -148,7 +148,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.132101Z",
"created_at": "2025-10-01T01:33:54.311986552Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -166,7 +166,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.17401Z",
"created_at": "2025-10-01T01:33:54.505749874Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -184,7 +184,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.216115Z",
"created_at": "2025-10-01T01:33:54.699245098Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -202,7 +202,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.257109Z",
"created_at": "2025-10-01T01:33:54.890029079Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -220,7 +220,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.298731Z",
"created_at": "2025-10-01T01:33:55.081182058Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -238,7 +238,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.338833Z",
"created_at": "2025-10-01T01:33:55.27115012Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -256,7 +256,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.38053Z",
"created_at": "2025-10-01T01:33:55.46403171Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -274,7 +274,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.421378Z",
"created_at": "2025-10-01T01:33:55.655042212Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -292,7 +292,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.462646Z",
"created_at": "2025-10-01T01:33:55.844320935Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -310,7 +310,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.503814Z",
"created_at": "2025-10-01T01:33:56.035465828Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -328,7 +328,7 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.545397Z",
"created_at": "2025-10-01T01:33:56.240155299Z",
"done": false,
"done_reason": null,
"total_duration": null,
@ -346,15 +346,15 @@
"__type__": "ollama._types.GenerateResponse",
"__data__": {
"model": "llama3.2:3b-instruct-fp16",
"created_at": "2025-09-03T17:37:49.586834Z",
"created_at": "2025-10-01T01:33:56.432393304Z",
"done": true,
"done_reason": "stop",
"total_duration": 1409239209,
"load_duration": 118889250,
"total_duration": 34185152900,
"load_duration": 44303323,
"prompt_eval_count": 368,
"prompt_eval_duration": 543077166,
"prompt_eval_duration": 30642631331,
"eval_count": 19,
"eval_duration": 746733584,
"eval_duration": 3497664639,
"response": "",
"thinking": null,
"context": null