Mirror of https://github.com/meta-llama/llama-stack.git, last synced 2025-08-03 01:03:59 +00:00.
update tests
This commit is contained in:
parent
234f4e4583
commit
a3c07ac10a
4 changed files with 93 additions and 34 deletions
|
@ -10,7 +10,7 @@ import mimetypes
|
||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from llama_stack.apis.datasets import Dataset
|
|
||||||
# How to run this test:
|
# How to run this test:
|
||||||
#
|
#
|
||||||
# LLAMA_STACK_CONFIG="template-name" pytest -v tests/integration/datasets
|
# LLAMA_STACK_CONFIG="template-name" pytest -v tests/integration/datasets
|
||||||
|
@ -94,36 +94,3 @@ def test_register_and_iterrows(llama_stack_client, purpose, source, provider_id,
|
||||||
llama_stack_client.datasets.unregister(dataset.identifier)
|
llama_stack_client.datasets.unregister(dataset.identifier)
|
||||||
dataset_list = llama_stack_client.datasets.list()
|
dataset_list = llama_stack_client.datasets.list()
|
||||||
assert dataset.identifier not in [d.identifier for d in dataset_list]
|
assert dataset.identifier not in [d.identifier for d in dataset_list]
|
||||||
|
|
||||||
# nvidia provider only
@pytest.mark.parametrize(
    "provider_id",
    [
        "nvidia",
    ],
)
def test_register_and_unregister(llama_stack_client, provider_id):
    """Round-trip a dataset through register/list/unregister against the given provider.

    Verifies that registration echoes back the requested identifier and provider,
    that the dataset then appears in the listing, and that it disappears after
    unregistering.
    """
    purpose = "eval/messages-answer"
    source = {
        "type": "uri",
        "uri": "hf://datasets/llamastack/simpleqa?split=train",
    }
    # Hoist the id so the register call and the assertions below agree by construction.
    dataset_id = f"test-dataset-{provider_id}"
    dataset = llama_stack_client.datasets.register(
        dataset_id=dataset_id,
        purpose=purpose,
        source=source,
        metadata={"provider": provider_id, "format": "json", "description": "Test dataset description"},
    )
    assert dataset.identifier is not None
    assert dataset.provider_id == provider_id
    assert dataset.identifier == dataset_id

    dataset_list = llama_stack_client.datasets.list()
    provider_datasets = [d for d in dataset_list if d.provider_id == provider_id]
    assert any(provider_datasets)
    # Generator form: no need to materialize a list just to feed any() (C419).
    assert any(d.identifier == dataset_id for d in provider_datasets)

    llama_stack_client.datasets.unregister(dataset.identifier)
    dataset_list = llama_stack_client.datasets.list()
    provider_datasets = [d for d in dataset_list if d.identifier == dataset.identifier]
    assert not any(provider_datasets)
|
|
5
tests/integration/providers/nvidia/__init__.py
Normal file
5
tests/integration/providers/nvidia/__init__.py
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# This source code is licensed under the terms described in the LICENSE file in
|
||||||
|
# the root directory of this source tree.
|
47
tests/integration/providers/nvidia/test_datastore.py
Normal file
47
tests/integration/providers/nvidia/test_datastore.py
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
# Copyright (c) Meta Platforms, Inc. and affiliates.
|
||||||
|
# All rights reserved.
|
||||||
|
#
|
||||||
|
# This source code is licensed under the terms described in the LICENSE file in
|
||||||
|
# the root directory of this source tree.
|
||||||
|
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
# How to run this test:
|
||||||
|
#
|
||||||
|
# LLAMA_STACK_CONFIG="nvidia" pytest -v tests/integration/providers/nvidia/test_datastore.py
|
||||||
|
|
||||||
|
|
||||||
|
# nvidia provider only
@pytest.mark.parametrize(
    "provider_id",
    [
        "nvidia",
    ],
)
def test_register_and_unregister(llama_stack_client, provider_id):
    """Register a dataset, confirm it is listed for the provider, then unregister it
    and confirm it no longer appears in the listing."""
    dataset_id = f"test-dataset-{provider_id}"
    dataset = llama_stack_client.datasets.register(
        dataset_id=dataset_id,
        purpose="eval/messages-answer",
        source={
            "type": "uri",
            "uri": "hf://datasets/llamastack/simpleqa?split=train",
        },
        metadata={"provider": provider_id, "format": "json", "description": "Test dataset description"},
    )

    # Registration must echo back the identifier and provider we asked for.
    assert dataset.identifier is not None
    assert dataset.provider_id == provider_id
    assert dataset.identifier == dataset_id

    # The freshly registered dataset shows up in this provider's listing.
    listed_for_provider = [d for d in llama_stack_client.datasets.list() if d.provider_id == provider_id]
    assert any(listed_for_provider)
    assert any(d.identifier == dataset_id for d in listed_for_provider)

    # After unregistering, no dataset with this identifier remains.
    llama_stack_client.datasets.unregister(dataset.identifier)
    still_listed = [d for d in llama_stack_client.datasets.list() if d.identifier == dataset.identifier]
    assert not any(still_listed)
|
|
@ -93,6 +93,46 @@ class TestNvidiaDatastore(unittest.TestCase):
|
||||||
self.mock_make_request.assert_called_once()
|
self.mock_make_request.assert_called_once()
|
||||||
self._assert_request(self.mock_make_request, "DELETE", "/v1/datasets/default/test-dataset")
|
self._assert_request(self.mock_make_request, "DELETE", "/v1/datasets/default/test-dataset")
|
||||||
|
|
||||||
|
def test_register_dataset_with_custom_namespace_project(self):
    """Registering through an adapter configured with a non-default namespace and
    project must forward both values in the POST payload."""
    # Adapter whose config overrides the default namespace/project.
    config = NvidiaDatasetIOConfig(
        datasets_url=os.environ["NVIDIA_DATASETS_URL"],
        dataset_namespace="custom-namespace",
        project_id="custom-project",
    )
    adapter = NvidiaDatasetIOAdapter(config)

    self.mock_make_request.return_value = {
        "id": "dataset-123456",
        "name": "test-dataset",
        "namespace": "custom-namespace",
    }

    self.run_async(
        adapter.register_dataset(
            Dataset(
                identifier="test-dataset",
                type="dataset",
                provider_resource_id="",
                provider_id="",
                purpose=DatasetPurpose.post_training_messages,
                source=URIDataSource(uri="https://example.com/data.jsonl"),
                metadata={"format": "jsonl"},
            )
        )
    )

    # Exactly one request was made, and it carries the custom namespace/project.
    self.mock_make_request.assert_called_once()
    self._assert_request(
        self.mock_make_request,
        "POST",
        "/v1/datasets",
        expected_json={
            "name": "test-dataset",
            "namespace": "custom-namespace",
            "files_url": "https://example.com/data.jsonl",
            "project": "custom-project",
            "format": "jsonl",
        },
    )
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
unittest.main()
|
unittest.main()
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue