Skip to content

Commit

Permalink
test: Cherry pick sparse inverted index algo tests (#39816)
Browse files Browse the repository at this point in the history
related issue: #39332
related pr: #39691

---------

Signed-off-by: yanliang567 <[email protected]>
  • Loading branch information
yanliang567 authored Feb 17, 2025
1 parent ee25af4 commit 3a951f2
Show file tree
Hide file tree
Showing 10 changed files with 119 additions and 54 deletions.
7 changes: 4 additions & 3 deletions tests/python_client/check/func_check.py
Original file line number Diff line number Diff line change
Expand Up @@ -251,8 +251,8 @@ def check_describe_collection_property(res, func_name, check_items):
assert res["enable_dynamic_field"] == check_items.get("enable_dynamic_field", True)
if check_items.get("num_partitions", 1):
assert res["num_partitions"] == check_items.get("num_partitions", 1)
if check_items.get("id_name", "id"):
assert res["fields"][0]["name"] == check_items.get("id_name", "id")
if check_items.get("primary_field", None) is not None:
assert res["fields"][0]["name"] == check_items.get("primary_field")
if check_items.get("vector_name", "vector"):
assert res["fields"][1]["name"] == check_items.get("vector_name", "vector")
if check_items.get("dim", None) is not None:
Expand Down Expand Up @@ -372,13 +372,14 @@ def check_search_results(search_res, func_name, check_items):
log.info("search_results_check: Numbers of query searched is correct")
enable_milvus_client_api = check_items.get("enable_milvus_client_api", False)
# log.debug(search_res)
pk_name = check_items.get('primary_field', 'id')
for hits in search_res:
searched_original_vectors = []
ids = []
vector_id = 0
if enable_milvus_client_api:
for hit in hits:
ids.append(hit['id'])
ids.append(hit[pk_name])
else:
ids = list(hits.ids)
if (len(hits) != check_items["limit"]) \
Expand Down
4 changes: 3 additions & 1 deletion tests/python_client/common/common_type.py
Original file line number Diff line number Diff line change
Expand Up @@ -220,10 +220,12 @@
"SPARSE_INVERTED_INDEX", "SPARSE_WAND",
"GPU_IVF_FLAT", "GPU_IVF_PQ"]

inverted_index_algo = ['TAAT_NAIVE', 'DAAT_WAND', 'DAAT_MAXSCORE']

default_all_indexes_params = [{}, {"nlist": 128}, {"nlist": 128}, {"nlist": 128, "m": 16, "nbits": 8},
{"M": 32, "efConstruction": 360}, {"nlist": 128}, {},
{}, {"nlist": 64},
{"drop_ratio_build": 0.2}, {"drop_ratio_build": 0.2},
{}, {"drop_ratio_build": 0.2},
{"nlist": 64}, {"nlist": 64, "m": 16, "nbits": 8}]

default_all_search_params_params = [{}, {"nprobe": 32}, {"nprobe": 32}, {"nprobe": 32},
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ def test_milvus_client_collection_self_creation_default(self, nullable):
"consistency_level": 0,
"enable_dynamic_field": False,
"num_partitions": 16,
"id_name": "id_string",
"primary_field": "id_string",
"vector_name": "embeddings"}
if nullable:
check_items["nullable_fields"] = ["nullable_field", "array_field"]
Expand Down
31 changes: 23 additions & 8 deletions tests/python_client/milvus_client/test_milvus_client_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -247,39 +247,54 @@ def test_milvus_client_rename_search_query_default(self):
client = self._client()
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Bounded")
schema = self.create_schema(client, enable_dynamic_field=True)[0]
pk_name = 'pk_varchar'
schema.add_field(pk_name, DataType.VARCHAR, max_length=64, is_primary=True,
auto_id=False)
schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=default_dim)
schema.add_field(default_string_field_name, DataType.VARCHAR, max_length=64)
schema.add_field(default_float_field_name, DataType.FLOAT)
self.create_collection(client, collection_name, schema=schema, consistency_level="Bounded")
collections = self.list_collections(client)[0]
assert collection_name in collections
self.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"dim": default_dim, "primary_field": pk_name,
"consistency_level": 0})
old_name = collection_name
new_name = collection_name + "new"
self.rename_collection(client, old_name, new_name)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
rows = [{pk_name: str(i),
default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_string_field_name: str(i),
default_float_field_name: i*1.0
} for i in range(default_nb)]
self.insert(client, new_name, rows)
self.flush(client, new_name)
index_params = self.prepare_index_params(client)[0]
index_params.add_index(default_vector_field_name, metric_type="COSINE")
self.create_index(client, new_name, index_params=index_params)
self.load_collection(client, new_name)
# assert self.num_entities(client, collection_name)[0] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
insert_ids = [str(i) for i in range(default_nb)]
self.search(client, new_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"ids": insert_ids, "primary_field": pk_name,
"limit": default_limit})
# 4. query
self.query(client, new_name, filter=default_search_exp,
filter = f"{default_float_field_name} >= 0"
self.query(client, new_name, filter=filter,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
"primary_field": pk_name})
self.release_collection(client, new_name)
self.drop_collection(client, new_name)

Expand Down
4 changes: 2 additions & 2 deletions tests/python_client/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -28,8 +28,8 @@ pytest-parallel
pytest-random-order

# pymilvus
pymilvus==2.5.3
pymilvus[bulk_writer]==2.5.3
pymilvus==2.5.5rc5
pymilvus[bulk_writer]==2.5.5rc5

# for customize config test
python-benedict==0.24.3
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -281,6 +281,7 @@ async def test_async_client_with_schema(self, schema):
"params": {"ef": "96"}},
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"primary_field": ct.default_string_field_name,
"nq": ct.default_nq,
"limit": ct.default_limit})
tasks.append(default_search_task)
Expand All @@ -307,6 +308,7 @@ async def test_async_client_with_schema(self, schema):
check_task=CheckTasks.check_search_results,
check_items={
"enable_milvus_client_api": True,
"primary_field": ct.default_string_field_name,
"nq": ct.default_nq,
"limit": 5})
tasks.append(filter_params_search_task)
Expand Down
10 changes: 7 additions & 3 deletions tests/python_client/testcases/test_full_text_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -2315,9 +2315,10 @@ def test_full_text_search_default(
@pytest.mark.parametrize("expr", ["text_match"])
@pytest.mark.parametrize("offset", [10])
@pytest.mark.parametrize("tokenizer", ["jieba"])
@pytest.mark.parametrize("inverted_index_algo", ct.inverted_index_algo)
def test_full_text_search_with_jieba_tokenizer(
self, offset, tokenizer, expr, enable_inverted_index, enable_partition_key, empty_percent, index_type, nq
):
self, offset, tokenizer, expr, enable_inverted_index, enable_partition_key,
empty_percent, index_type, nq, inverted_index_algo):
"""
target: test full text search
method: 1. enable full text search with jieba tokenizer and insert data with varchar
Expand Down Expand Up @@ -2430,6 +2431,7 @@ def test_full_text_search_with_jieba_tokenizer(
"params": {
"bm25_k1": 1.5,
"bm25_b": 0.75,
"inverted_index_algo": inverted_index_algo
}
}
)
Expand Down Expand Up @@ -3302,8 +3304,9 @@ class TestHybridSearchWithFullTextSearch(TestcaseBase):
@pytest.mark.parametrize("enable_inverted_index", [True])
@pytest.mark.parametrize("index_type", ["SPARSE_INVERTED_INDEX"])
@pytest.mark.parametrize("tokenizer", ["standard"])
@pytest.mark.parametrize("inverted_index_algo", ct.inverted_index_algo)
def test_hybrid_search_with_full_text_search(
self, tokenizer, enable_inverted_index, enable_partition_key, empty_percent, index_type
self, tokenizer, enable_inverted_index, enable_partition_key, empty_percent, index_type, inverted_index_algo
):
"""
target: test full text search
Expand Down Expand Up @@ -3403,6 +3406,7 @@ def test_hybrid_search_with_full_text_search(
"params": {
"bm25_k1": 1.5,
"bm25_b": 0.75,
"inverted_index_algo": inverted_index_algo
}
}
)
Expand Down
22 changes: 22 additions & 0 deletions tests/python_client/testcases/test_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -1484,6 +1484,28 @@ def test_invalid_sparse_ratio(self, ratio, index):
check_task=CheckTasks.err_res,
check_items=error)

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("inverted_index_algo", ["INVALID_ALGO"])
@pytest.mark.parametrize("index", ct.all_index_types[9:11])
def test_invalid_sparse_inverted_index_algo(self, inverted_index_algo, index):
    """
    target: index creation with an unsupported inverted_index_algo parameter
    method: create a sparse index (index types taken from ct.all_index_types[9:11])
            with an invalid inverted_index_algo value
    expected: raise exception whose message lists the supported algorithms
    """
    c_name = cf.gen_unique_str(prefix)
    schema = cf.gen_default_sparse_schema()
    collection_w = self.init_collection_wrap(name=c_name, schema=schema)
    data = cf.gen_default_list_sparse_data()
    collection_w.insert(data=data)
    params = {"index_type": index, "metric_type": "IP",
              "params": {"inverted_index_algo": inverted_index_algo}}
    # server is expected to reject the unknown algo with this exact message
    error = {ct.err_code: 999,
             ct.err_msg: f"sparse inverted index algo {inverted_index_algo} not found or not supported, "
                         f"supported: [TAAT_NAIVE DAAT_WAND DAAT_MAXSCORE]"}
    # don't rebind the `index` parameter with the (failed) index object
    _, _ = self.index_wrap.init_index(collection_w.collection, ct.default_sparse_vec_field_name, params,
                                      check_task=CheckTasks.err_res,
                                      check_items=error)


@pytest.mark.tags(CaseLabel.GPU)
class TestNewIndexAsync(TestcaseBase):
Expand Down
Loading

0 comments on commit 3a951f2

Please sign in to comment.