mirror of
https://github.com/meta-llama/llama-stack.git
synced 2025-12-05 18:27:22 +00:00
feat: openai files provider
# What does this PR do?

## Test Plan
This commit is contained in:
parent
94b0592240
commit
889ce058b9
6 changed files with 347 additions and 27 deletions
|
|
@ -80,6 +80,13 @@ def skip_if_provider_doesnt_support_openai_vector_stores_search(client_with_mode
|
|||
)
|
||||
|
||||
|
||||
# TODO
|
||||
def skip_if_files_provider_is_openai(llama_stack_client):
    """Return True when the stack's files provider is the remote OpenAI one.

    Collects every registered provider whose API is "files", insists that
    exactly one exists, and reports whether that provider's type is
    ``remote::openai`` so callers can skip tests unsupported by it.
    """
    files_providers = []
    for candidate in llama_stack_client.providers.list():
        if candidate.api == "files":
            files_providers.append(candidate)
    assert len(files_providers) == 1, "Expected exactly one files provider"
    return files_providers[0].provider_type == "remote::openai"
|
||||
|
||||
|
||||
@pytest.fixture(scope="session")
|
||||
def sample_chunks():
|
||||
return [
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue