|
| 1 | +From 246d8e82c805e2e49ea0abd39abc9b2d800bde59 Mon Sep 17 00:00:00 2001 |
| 2 | +From: Colin Watson <[email protected]>
| 3 | +Date: Mon, 3 Feb 2025 15:28:10 +0000 |
| 4 | +Subject: [PATCH] Support numpy 2.0 |
| 5 | + |
| 6 | +See: https://numpy.org/devdocs/numpy_2_0_migration_guide.html#adapting-to-changes-in-the-copy-keyword |
| 7 | +--- |
| 8 | + hickle/legacy_v3/loaders/load_numpy.py | 2 +- |
| 9 | + hickle/loaders/load_builtins.py | 4 ++-- |
| 10 | + hickle/loaders/load_numpy.py | 2 +- |
| 11 | + hickle/tests/test_02_hickle_lookup.py | 6 +++--- |
| 12 | + 4 files changed, 7 insertions(+), 7 deletions(-) |
| 13 | + |
| 14 | +diff --git a/hickle/legacy_v3/loaders/load_numpy.py b/hickle/legacy_v3/loaders/load_numpy.py |
| 15 | +index 0fd6c0a4..d17044bc 100644 |
| 16 | +--- a/hickle/legacy_v3/loaders/load_numpy.py |
| 17 | ++++ b/hickle/legacy_v3/loaders/load_numpy.py |
| 18 | +@@ -115,7 +115,7 @@ def load_np_scalar_dataset(h_node): |
| 19 | + |
| 20 | + def load_ndarray_dataset(h_node): |
| 21 | + py_type, data = get_type_and_data(h_node) |
| 22 | +- return np.array(data, copy=False) |
| 23 | ++ return np.asarray(data) |
| 24 | + |
| 25 | + def load_ndarray_masked_dataset(h_node): |
| 26 | + py_type, data = get_type_and_data(h_node) |
| 27 | +diff --git a/hickle/loaders/load_builtins.py b/hickle/loaders/load_builtins.py |
| 28 | +index 7cfbf281..53b74429 100644 |
| 29 | +--- a/hickle/loaders/load_builtins.py |
| 30 | ++++ b/hickle/loaders/load_builtins.py |
| 31 | +@@ -170,7 +170,7 @@ def create_listlike_dataset(py_obj, h_group, name,list_len = -1,item_dtype = Non |
| 32 | + # strings and bytes are stored as array of bytes with strings encoded |
| 33 | + # using utf8 encoding |
| 34 | + string_data = bytearray(py_obj,"utf8") if isinstance(py_obj,str) else memoryview(py_obj) |
| 35 | +- string_data = np.array(string_data,copy=False) |
| 36 | ++ string_data = np.asarray(string_data) |
| 37 | + string_data.dtype = 'S1' |
| 38 | + dataset = h_group.create_dataset( name, data = string_data,shape = (1,string_data.size), **kwargs) |
| 39 | + dataset.attrs["str_type"] = py_obj.__class__.__name__.encode("ascii") |
| 40 | +@@ -385,7 +385,7 @@ def load_list_dataset(h_node,base_type,py_obj_type): |
| 41 | + if h_node.dtype.itemsize > 1 and 'bytes' in h_node.dtype.name: |
| 42 | + |
| 43 | + # string dataset 4.0.x style convert it back to python string |
| 44 | +- content = np.array(content, copy=False, dtype=str).tolist() |
| 45 | ++ content = np.asarray(content, dtype=str).tolist() |
| 46 | + else: |
| 47 | + |
| 48 | + # decode bytes representing python string before final conversion |
| 49 | +diff --git a/hickle/loaders/load_numpy.py b/hickle/loaders/load_numpy.py |
| 50 | +index a4c76e91..bff98187 100644 |
| 51 | +--- a/hickle/loaders/load_numpy.py |
| 52 | ++++ b/hickle/loaders/load_numpy.py |
| 53 | +@@ -232,7 +232,7 @@ def load_ndarray_dataset(h_node,base_type,py_obj_type): |
| 54 | + # not converted to list of string but saved as ar consequently |
| 55 | + # itemsize of dtype is > 1 |
| 56 | + string_data = bytes(string_data).decode("utf8") |
| 57 | +- return np.array(string_data,copy=False,dtype=dtype) |
| 58 | ++ return np.asarray(string_data,dtype=dtype) |
| 59 | + if issubclass(py_obj_type,np.matrix): |
| 60 | + return py_obj_type(data=h_node[()],dtype=dtype) |
| 61 | + # TODO how to restore other ndarray derived object_types |
| 62 | +diff --git a/hickle/tests/test_02_hickle_lookup.py b/hickle/tests/test_02_hickle_lookup.py |
| 63 | +index 628a2b12..dd91ffd1 100644 |
| 64 | +--- a/hickle/tests/test_02_hickle_lookup.py |
| 65 | ++++ b/hickle/tests/test_02_hickle_lookup.py |
| 66 | +@@ -816,7 +816,7 @@ def test_ReferenceManager_get_root(h5_data): |
| 67 | + content = data_group.create_dataset('mydata',data=12) |
| 68 | + type_table = root_group.create_group('hickle_types_table') |
| 69 | + int_pickle_string = bytearray(pickle.dumps(int)) |
| 70 | +- int_np_entry = np.array(int_pickle_string,copy=False) |
| 71 | ++ int_np_entry = np.asarray(int_pickle_string) |
| 72 | + int_np_entry.dtype = 'S1' |
| 73 | + int_entry = type_table.create_dataset(str(len(type_table)),data = int_np_entry,shape =(1,int_np_entry.size)) |
| 74 | + int_base_type = b'int' |
| 75 | +@@ -878,7 +878,7 @@ def test_ReferenceManager(h5_data): |
| 76 | + with pytest.raises(lookup.ReferenceError): |
| 77 | + reference_manager = lookup.ReferenceManager(false_root) |
| 78 | + int_pickle_string = bytearray(pickle.dumps(int)) |
| 79 | +- int_np_entry = np.array(int_pickle_string,copy=False) |
| 80 | ++ int_np_entry = np.asarray(int_pickle_string) |
| 81 | + int_np_entry.dtype = 'S1' |
| 82 | + int_entry = type_table.create_dataset(str(len(type_table)),data = int_np_entry,shape =(1,int_np_entry.size)) |
| 83 | + int_base_type = b'int' |
| 84 | +@@ -1052,7 +1052,7 @@ def test_ReferenceManager_store_type(h5_data,compression_kwargs): |
| 85 | + @pytest.mark.no_compression |
| 86 | + def test_ReferenceManager_get_manager(h5_data): |
| 87 | + h_node = h5_data.create_group('some_list') |
| 88 | +- item_data = np.array(memoryview(b'hallo welt lore grueszet dich ipsum aus der lore von ipsum gelort in ipsum'),copy=False) |
| 89 | ++ item_data = np.asarray(memoryview(b'hallo welt lore grueszet dich ipsum aus der lore von ipsum gelort in ipsum')) |
| 90 | + item_data.dtype = 'S1' |
| 91 | + h_item = h_node.create_dataset('0',data=item_data,shape=(1,item_data.size)) |
| 92 | + with lookup.ReferenceManager.create_manager(h5_data) as memo: |
0 commit comments