mirror of https://github.com/meta-llama/llama-stack.git
synced 2025-12-13 04:22:35 +00:00

refactor: switch to the new default nomic-embed-text-v1.5 embedding model in LS

parent b95f095a54
commit 429f1d2405

51 changed files with 16149 additions and 83 deletions
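The hunks below all come from one of the changed integration-test files and show two recurring test-side changes: the ranking-options search test now derives its score threshold from an initial, threshold-free search rather than hard-coding 0.1, and every test file upload now passes explicit expiration metadata (expires_after_anchor / expires_after_seconds). The sketch below condenses both patterns into one helper for orientation; compat_client and vector_store are the fixture names that appear in the diff, while the helper name itself and the surrounding setup are illustrative assumptions, not part of this commit.

from io import BytesIO


def example_upload_and_threshold(compat_client, vector_store):
    """Sketch only: `compat_client` (an OpenAI-compatible client) and
    `vector_store` (an already-populated store) are assumed test fixtures."""
    # Derive the ranking threshold from an initial, threshold-free search
    # instead of hard-coding 0.1, as the first hunk below does.
    initial_search = compat_client.vector_stores.search(
        vector_store_id=vector_store.id,
        query="machine learning and artificial intelligence",
        max_num_results=3,
    )
    if initial_search.data:
        # Stay just under the weakest returned score so results survive filtering.
        threshold = min(result.score for result in initial_search.data) * 0.9
    else:
        threshold = 0.01

    # Upload a test file with explicit expiration metadata (24 hours after
    # creation), the pattern the remaining hunks switch to.
    with BytesIO(b"This is a test file") as file_buffer:
        file_buffer.name = "openai_test.txt"
        file = compat_client.files.create(
            file=file_buffer,
            purpose="assistants",
            expires_after_anchor="created_at",
            expires_after_seconds=86400,  # 24 hours
        )
    return threshold, file.id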
@@ -468,8 +468,20 @@ def test_openai_vector_store_search_with_ranking_options(
         chunks=sample_chunks,
     )

+    # First search without threshold to determine reasonable threshold
+    initial_search = compat_client.vector_stores.search(
+        vector_store_id=vector_store.id,
+        query="machine learning and artificial intelligence",
+        max_num_results=3,
+    )
+
+    # Use a threshold that's lower than the lowest score to ensure we get results
+    if initial_search.data:
+        threshold = min(result.score for result in initial_search.data) * 0.9
+    else:
+        threshold = 0.01
+
     # Search with ranking options
-    threshold = 0.1
     search_response = compat_client.vector_stores.search(
         vector_store_id=vector_store.id,
         query="machine learning and artificial intelligence",
@@ -605,7 +617,12 @@ def test_openai_vector_store_attach_file(
     test_content = b"The secret string is foobazbar."
     with BytesIO(test_content) as file_buffer:
         file_buffer.name = "openai_test.txt"
-        file = compat_client.files.create(file=file_buffer, purpose="assistants")
+        file = compat_client.files.create(
+            file=file_buffer,
+            purpose="assistants",
+            expires_after_anchor="created_at",
+            expires_after_seconds=86400, # 24 hours
+        )

     # Attach the file to the vector store
     file_attach_response = compat_client.vector_stores.files.create(
@@ -653,7 +670,12 @@ def test_openai_vector_store_attach_files_on_creation(
     for i in range(3):
         with BytesIO(f"This is a test file {i}".encode()) as file_buffer:
             file_buffer.name = f"openai_test_{i}.txt"
-            file = compat_client.files.create(file=file_buffer, purpose="assistants")
+            file = compat_client.files.create(
+                file=file_buffer,
+                purpose="assistants",
+                expires_after_anchor="created_at",
+                expires_after_seconds=86400, # 24 hours
+            )
         valid_file_ids.append(file.id)

     # include an invalid file ID so we can test failed status
@@ -723,7 +745,12 @@ def test_openai_vector_store_list_files(
     for i in range(3):
         with BytesIO(f"This is a test file {i}".encode()) as file_buffer:
             file_buffer.name = f"openai_test_{i}.txt"
-            file = compat_client.files.create(file=file_buffer, purpose="assistants")
+            file = compat_client.files.create(
+                file=file_buffer,
+                purpose="assistants",
+                expires_after_anchor="created_at",
+                expires_after_seconds=86400, # 24 hours
+            )

         response = compat_client.vector_stores.files.create(
             vector_store_id=vector_store.id,
@@ -810,7 +837,12 @@ def test_openai_vector_store_retrieve_file_contents(
     attributes = {"foo": "bar"}
     with BytesIO(test_content) as file_buffer:
         file_buffer.name = file_name
-        file = compat_client.files.create(file=file_buffer, purpose="assistants")
+        file = compat_client.files.create(
+            file=file_buffer,
+            purpose="assistants",
+            expires_after_anchor="created_at",
+            expires_after_seconds=86400, # 24 hours
+        )

     # Attach the file to the vector store
     file_attach_response = compat_client.vector_stores.files.create(
@@ -867,7 +899,12 @@ def test_openai_vector_store_delete_file(
     for i in range(3):
         with BytesIO(f"This is a test file {i}".encode()) as file_buffer:
             file_buffer.name = f"openai_test_{i}.txt"
-            file = compat_client.files.create(file=file_buffer, purpose="assistants")
+            file = compat_client.files.create(
+                file=file_buffer,
+                purpose="assistants",
+                expires_after_anchor="created_at",
+                expires_after_seconds=86400, # 24 hours
+            )

         compat_client.vector_stores.files.create(
             vector_store_id=vector_store.id,
@@ -927,7 +964,12 @@ def test_openai_vector_store_delete_file_removes_from_vector_store(
     test_content = b"The secret string is foobazbar."
     with BytesIO(test_content) as file_buffer:
         file_buffer.name = "openai_test.txt"
-        file = compat_client.files.create(file=file_buffer, purpose="assistants")
+        file = compat_client.files.create(
+            file=file_buffer,
+            purpose="assistants",
+            expires_after_anchor="created_at",
+            expires_after_seconds=86400, # 24 hours
+        )

     # Attach the file to the vector store
     file_attach_response = compat_client.vector_stores.files.create(
@@ -974,7 +1016,12 @@ def test_openai_vector_store_update_file(
     test_content = b"This is a test file"
     with BytesIO(test_content) as file_buffer:
         file_buffer.name = "openai_test.txt"
-        file = compat_client.files.create(file=file_buffer, purpose="assistants")
+        file = compat_client.files.create(
+            file=file_buffer,
+            purpose="assistants",
+            expires_after_anchor="created_at",
+            expires_after_seconds=86400, # 24 hours
+        )

     # Attach the file to the vector store
     file_attach_response = compat_client.vector_stores.files.create(
@@ -1019,7 +1066,12 @@ def test_create_vector_store_files_duplicate_vector_store_name(
     for i in range(3):
         with BytesIO(f"This is a test file {i}".encode()) as file_buffer:
             file_buffer.name = f"openai_test_{i}.txt"
-            file = compat_client.files.create(file=file_buffer, purpose="assistants")
+            file = compat_client.files.create(
+                file=file_buffer,
+                purpose="assistants",
+                expires_after_anchor="created_at",
+                expires_after_seconds=86400, # 24 hours
+            )
         file_ids.append(file.id)

     vector_store = compat_client.vector_stores.create(