Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-17 14:32:36 +00:00)
use incorrect keys to trigger inference error

This commit is contained in: parent 2b599aa9b4, commit 1a5cf1c57f
1 changed file with 2 additions and 2 deletions
@@ -180,8 +180,8 @@ class TestBatchesIntegration:
                 "url": "/v1/chat/completions",
                 "body": {
                     "model": text_model_id,
-                    "messages": [{"role": "user", "content": "This should fail"}],
-                    "max_tokens": -1,  # Invalid negative max_tokens will cause inference error
+                    "messages": [{"rolez": "user", "contentz": "This should fail"}],  # Invalid keys to trigger error
+                    # note: ollama does not validate max_tokens values or the "role" key, so they won't trigger an error
                 },
             },
         ]
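The change swaps the error trigger: a negative max_tokens is not reliable because ollama accepts it without validation, so the test instead misspells the message keys. Below is a minimal, hypothetical sketch of the batch input line this test builds; the custom_id and method fields, the placeholder model id, and the JSONL note are assumptions based on the OpenAI-style batch format and do not appear in this diff.

    import json

    # Placeholder model id; in the test this comes from the text_model_id fixture.
    text_model_id = "example-text-model"

    # One request line for a JSONL batch input file. "custom_id" and "method"
    # are assumed here from the OpenAI-style batch format; only "url" and
    # "body" appear in the diff above.
    request = {
        "custom_id": "error-request-1",
        "method": "POST",
        "url": "/v1/chat/completions",
        "body": {
            "model": text_model_id,
            # Intentionally invalid keys ("rolez"/"contentz" instead of
            # "role"/"content") so the inference request fails and the batch
            # records an error for this item. A negative max_tokens is not a
            # reliable trigger because ollama does not validate it.
            "messages": [{"rolez": "user", "contentz": "This should fail"}],
        },
    }

    # Batch input files are JSONL: one JSON object per line.
    print(json.dumps(request))

When a file containing this line is submitted as a batch, the expectation is an error entry for the request rather than a completed response, which is what the test asserts against.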