use incorrect keys to trigger inference error

Matthew Farrellee 2025-08-15 11:56:47 -04:00 committed by Ashwin Bharambe
parent 2b599aa9b4
commit 1a5cf1c57f


@@ -180,8 +180,8 @@ class TestBatchesIntegration:
                 "url": "/v1/chat/completions",
                 "body": {
                     "model": text_model_id,
-                    "messages": [{"role": "user", "content": "This should fail"}],
-                    "max_tokens": -1,  # Invalid negative max_tokens will cause inference error
+                    "messages": [{"rolez": "user", "contentz": "This should fail"}],  # Invalid keys to trigger error
+                    # note: ollama does not validate max_tokens values or the "role" key, so they won't trigger an error
                 },
             },
         ]
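
For context, a minimal sketch of what the full batch request entry might look like after this change. The "custom_id" and "method" fields and the "batch_requests" list name follow the OpenAI-compatible batch input format and are assumptions; only "url", "body", and the message contents come from the hunk above.

# Sketch of the modified request entry (assumptions noted in comments).
batch_requests = [
    {
        "custom_id": "bad-request-1",  # hypothetical identifier, not shown in the diff
        "method": "POST",              # assumed, per the OpenAI-compatible batch format
        "url": "/v1/chat/completions",
        "body": {
            "model": text_model_id,  # assumed to be supplied by a test fixture
            # "rolez"/"contentz" are not valid message keys, so the inference
            # provider (including ollama) should reject the request and record
            # an error, unlike a negative max_tokens or a bad "role" value.
            "messages": [{"rolez": "user", "contentz": "This should fail"}],
        },
    },
]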