Mirror of https://github.com/meta-llama/llama-stack.git, synced 2025-12-24 14:50:01 +00:00.
Merge branch 'main' into fix/issue-2584-llama4-tool-calling
This commit is contained in:
commit
d9f558e69f
14 changed files with 145 additions and 38 deletions
|
|
@ -821,6 +821,59 @@ def test_openai_vector_store_update_file(compat_client_with_empty_stores, client
|
|||
assert retrieved_file.attributes["foo"] == "baz"
|
||||
|
||||
|
||||
def test_create_vector_store_files_duplicate_vector_store_name(compat_client_with_empty_stores, client_with_models):
    """
    This test confirms that client.vector_stores.create() creates a unique ID
    """
    skip_if_provider_doesnt_support_openai_vector_stores(client_with_models)
    skip_if_provider_doesnt_support_openai_vector_store_files_api(client_with_models)

    if isinstance(compat_client_with_empty_stores, LlamaStackClient):
        pytest.skip("Vector Store Files create is not yet supported with LlamaStackClient")

    client = compat_client_with_empty_stores

    def _upload(index):
        # Upload one small text file and return its server-assigned id.
        with BytesIO(f"This is a test file {index}".encode()) as file_buffer:
            file_buffer.name = f"openai_test_{index}.txt"
            uploaded = client.files.create(file=file_buffer, purpose="assistants")
            return uploaded.id

    uploaded_ids = [_upload(i) for i in range(3)]

    first_store = client.vector_stores.create(
        name="test_store_with_files",
    )
    # A freshly created store starts with zero files in every state.
    counts = first_store.file_counts
    assert counts.completed == 0
    assert counts.total == 0
    assert counts.cancelled == 0
    assert counts.failed == 0
    assert counts.in_progress == 0

    # Creating a second store with the identical name must yield a distinct store.
    second_store = client.vector_stores.create(
        name="test_store_with_files",
    )

    assert len(client.vector_stores.list().data) == 2

    attached = client.vector_stores.files.create(
        vector_store_id=first_store.id,
        file_id=uploaded_ids[0],
    )
    assert attached.status == "completed"

    # Deleting the same-named duplicate must leave the first store fully usable.
    _ = client.vector_stores.delete(second_store.id)
    attached_after_delete = client.vector_stores.files.create(
        vector_store_id=first_store.id,
        file_id=uploaded_ids[1],
    )
    assert attached_after_delete.status == "completed"

    assert len(client.vector_stores.list().data) == 1
|
||||
|
||||
|
||||
@pytest.mark.skip(reason="Client library needs to be scaffolded to support search_mode parameter")
|
||||
def test_openai_vector_store_search_modes():
|
||||
"""Test OpenAI vector store search with different search modes.
|
||||
|
|
|
|||
|
|
@ -15,6 +15,37 @@ from llama_stack.distribution.configure import (
|
|||
)
|
||||
|
||||
|
||||
@pytest.fixture
def config_with_image_name_int():
    """Run-config dict whose image_name is an int (1234) rather than a string.

    Used to verify that config upgrading coerces image_name to str.
    """
    # NOTE(review): YAML nesting reconstructed conventionally — confirm against
    # the sibling fixtures in this file.
    raw_yaml = f"""
version: {LLAMA_STACK_RUN_CONFIG_VERSION}
image_name: 1234
apis_to_serve: []
built_at: {datetime.now().isoformat()}
providers:
  inference:
    - provider_id: provider1
      provider_type: inline::meta-reference
      config: {{}}
  safety:
    - provider_id: provider1
      provider_type: inline::meta-reference
      config:
        llama_guard_shield:
          model: Llama-Guard-3-1B
          excluded_categories: []
          disable_input_check: false
          disable_output_check: false
        enable_prompt_guard: false
  memory:
    - provider_id: provider1
      provider_type: inline::meta-reference
      config: {{}}
"""
    return yaml.safe_load(raw_yaml)
||||
|
||||
|
||||
@pytest.fixture
|
||||
def up_to_date_config():
|
||||
return yaml.safe_load(
|
||||
|
|
@ -125,3 +156,8 @@ def test_parse_and_maybe_upgrade_config_old_format(old_config):
|
|||
def test_parse_and_maybe_upgrade_config_invalid(invalid_config):
    """An invalid config must surface a KeyError during parse/upgrade."""
    pytest.raises(KeyError, parse_and_maybe_upgrade_config, invalid_config)
|
||||
|
||||
|
||||
def test_parse_and_maybe_upgrade_config_image_name_int(config_with_image_name_int):
    """An integer image_name in the raw config is coerced to str on upgrade."""
    upgraded = parse_and_maybe_upgrade_config(config_with_image_name_int)
    assert isinstance(upgraded.image_name, str)
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue