Mirror of https://github.com/meta-llama/llama-stack.git (synced 2025-12-12 12:06:04 +00:00)
Merge branch 'main' into change-default-embedding-model

Commit da35f2452e
15 changed files with 473 additions and 231 deletions
```diff
@@ -50,11 +50,15 @@ def skip_if_model_doesnt_support_encoding_format_base64(client, model_id):
 def skip_if_model_doesnt_support_variable_dimensions(client_with_models, model_id):
     provider = provider_from_model(client_with_models, model_id)
-    if provider.provider_type in (
-        "remote::together",  # returns 400
-        "inline::sentence-transformers",
-        # Error code: 400 - {'error_code': 'BAD_REQUEST', 'message': 'Bad request: json: unknown field "dimensions"\n'}
-        "remote::databricks",
-    ):
+    if (
+        provider.provider_type
+        in (
+            "remote::together",  # returns 400
+            "inline::sentence-transformers",
+            # Error code: 400 - {'error_code': 'BAD_REQUEST', 'message': 'Bad request: json: unknown field "dimensions"\n'}
+            "remote::databricks",
+            "remote::watsonx",  # openai.BadRequestError: Error code: 400 - {'detail': "litellm.UnsupportedParamsError: watsonx does not support parameters: {'dimensions': 384}
+        )
+    ):
         pytest.skip(
             f"Model {model_id} hosted by {provider.provider_type} does not support variable output embedding dimensions."
         )
```
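For context, here is a minimal, hypothetical sketch of how a skip helper like this is used: called at the top of a test before issuing an OpenAI-compatible embeddings request with the `dimensions` parameter, so providers that reject the field with a 400 (per the comments in the provider tuple above) are skipped rather than failed. Only `skip_if_model_doesnt_support_variable_dimensions`, `client_with_models`, and `provider_from_model` come from the diff; the test name, the `embedding_model_id` fixture, and the `embeddings.create(...)` call shape are assumptions for illustration.

```python
# Hypothetical usage sketch: assumes this lives in the same test module as the
# diff above, so skip_if_model_doesnt_support_variable_dimensions and the
# client_with_models fixture are already in scope.
def test_embedding_variable_dimensions(client_with_models, embedding_model_id):
    # Skip providers that reject the "dimensions" field outright (the 400
    # errors recorded as comments in the provider tuple above).
    skip_if_model_doesnt_support_variable_dimensions(client_with_models, embedding_model_id)

    # OpenAI-compatible embeddings request asking for a reduced output size.
    response = client_with_models.embeddings.create(
        model=embedding_model_id,
        input="hello, variable dimensions",
        dimensions=384,  # the same value watsonx rejects in the error above
    )
    assert len(response.data[0].embedding) == 384
```

Skipping here, rather than letting the request fail, keeps the provider matrix green while still exercising the `dimensions` parameter on every provider that actually supports it.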