mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 12:07:34 +00:00
chore(groq test): skip with_n tests for groq, it is not supported server-side (#3346)
# What does this PR do? Skip the with_n test for groq, because it isn't supported by the provider's service; see https://console.groq.com/docs/openai#currently-unsupported-openai-features Co-authored-by: raghotham <rsm@meta.com>
This commit is contained in:
parent
bf02cd846f
commit
9252d9fc01
1 changed file with 3 additions and 0 deletions
|
@ -64,6 +64,9 @@ def skip_if_doesnt_support_n(client_with_models, model_id):
|
|||
if provider.provider_type in (
|
||||
"remote::sambanova",
|
||||
"remote::ollama",
|
||||
# https://console.groq.com/docs/openai#currently-unsupported-openai-features
|
||||
# -> Error code: 400 - {'error': {'message': "'n' : number must be at most 1", 'type': 'invalid_request_error'}}
|
||||
"remote::groq",
|
||||
# Error code: 400 - [{'error': {'code': 400, 'message': 'Only one candidate can be specified in the
|
||||
# current model', 'status': 'INVALID_ARGUMENT'}}]
|
||||
"remote::gemini",
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue