mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-10-04 04:04:14 +00:00
feat: together now supports base64 embedding encoding (#3559)
# What does this PR do? Use Together's new base64 embedding support. ## Test Plan Recordings for: ./scripts/integration-tests.sh --stack-config server:ci-tests --suite base --setup together --subdirs inference --pattern openai
This commit is contained in:
parent
9c751b6789
commit
65e01b5684
27 changed files with 15951 additions and 7 deletions
|
@ -41,7 +41,6 @@ def skip_if_model_doesnt_support_user_param(client, model_id):
|
|||
def skip_if_model_doesnt_support_encoding_format_base64(client, model_id):
|
||||
provider = provider_from_model(client, model_id)
|
||||
if provider.provider_type in (
|
||||
"remote::together", # param silently ignored, always returns floats
|
||||
"remote::databricks", # param silently ignored, always returns floats
|
||||
"remote::fireworks", # param silently ignored, always returns list of floats
|
||||
"remote::ollama", # param silently ignored, always returns list of floats
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue