Mirror of https://github.com/meta-llama/llama-stack.git
Synced 2025-10-26 17:23:00 +00:00
		
Some checks failed

Integration Auth Tests / test-matrix (oauth2_token) (push) Failing after 1s
Test External Providers Installed via Module / test-external-providers-from-module (venv) (push) Has been skipped
SqlStore Integration Tests / test-postgres (3.12) (push) Failing after 5s
Vector IO Integration Tests / test-matrix (push) Failing after 4s
Test External API and Providers / test-external (venv) (push) Failing after 4s
Unit Tests / unit-tests (3.13) (push) Failing after 3s
SqlStore Integration Tests / test-postgres (3.13) (push) Failing after 21s
Python Package Build Test / build (3.12) (push) Failing after 20s
Python Package Build Test / build (3.13) (push) Failing after 25s
Unit Tests / unit-tests (3.12) (push) Failing after 25s
Integration Tests (Replay) / Integration Tests (, , , client=, ) (push) Failing after 28s
API Conformance Tests / check-schema-compatibility (push) Successful in 33s
UI Tests / ui-tests (22) (push) Successful in 58s
Pre-commit / pre-commit (push) Successful in 1m17s
# What does this PR do?

The nvidia datastore tests were running even when the datastore was not configured, so they would always fail. This introduces a skip when the nvidia datastore is not configured.

## Test Plan

CI
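The fix uses pytest's standard autouse-fixture pattern: a module-level fixture inspects the providers the running stack reports and calls pytest.skip before each test when the required one is missing. A minimal sketch of the pattern (the actual fixture appears in the file below):

import pytest

@pytest.fixture(autouse=True)
def skip_if_no_nvidia_provider(llama_stack_client):
    # Collect the provider types registered for the datasetio API.
    provider_types = {p.provider_type for p in llama_stack_client.providers.list() if p.api == "datasetio"}
    # Skip (rather than fail) every test in this module when nvidia is absent.
    if "remote::nvidia" not in provider_types:
        pytest.skip("datasetio=remote::nvidia provider not configured, skipping")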
57 lines · 1.9 KiB · Python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the terms described in the LICENSE file in
# the root directory of this source tree.


import pytest

from . import skip_in_github_actions

# How to run this test:
#
# LLAMA_STACK_CONFIG="nvidia" pytest -v tests/integration/providers/nvidia/test_datastore.py


@pytest.fixture(autouse=True)
def skip_if_no_nvidia_provider(llama_stack_client):
    provider_types = {p.provider_type for p in llama_stack_client.providers.list() if p.api == "datasetio"}
    if "remote::nvidia" not in provider_types:
        pytest.skip("datasetio=remote::nvidia provider not configured, skipping")


# nvidia provider only
@skip_in_github_actions
@pytest.mark.parametrize(
    "provider_id",
    [
        "nvidia",
    ],
)
def test_register_and_unregister(llama_stack_client, provider_id):
    purpose = "eval/messages-answer"
    source = {
        "type": "uri",
        "uri": "hf://datasets/llamastack/simpleqa?split=train",
    }
    dataset_id = f"test-dataset-{provider_id}"
    dataset = llama_stack_client.datasets.register(
        dataset_id=dataset_id,
        purpose=purpose,
        source=source,
        metadata={"provider_id": provider_id, "format": "json", "description": "Test dataset description"},
    )
    assert dataset.identifier is not None
    assert dataset.provider_id == provider_id
    assert dataset.identifier == dataset_id

    dataset_list = llama_stack_client.datasets.list()
    provider_datasets = [d for d in dataset_list if d.provider_id == provider_id]
    assert any(provider_datasets)
    assert any(d.identifier == dataset_id for d in provider_datasets)

    llama_stack_client.datasets.unregister(dataset.identifier)
    dataset_list = llama_stack_client.datasets.list()
    provider_datasets = [d for d in dataset_list if d.identifier == dataset.identifier]
    assert not any(provider_datasets)
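As the file's own header comment notes, the test can be run directly against a stack configured with the nvidia distribution:

LLAMA_STACK_CONFIG="nvidia" pytest -v tests/integration/providers/nvidia/test_datastore.py

When the datasetio=remote::nvidia provider is not configured, the autouse fixture now reports each test as skipped instead of letting it fail.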