diff --git a/mmdet/core/evaluation/mean_ap.py b/mmdet/core/evaluation/mean_ap.py
index 5205f2f..192b8ab 100644
--- a/mmdet/core/evaluation/mean_ap.py
+++ b/mmdet/core/evaluation/mean_ap.py
@@ -84,8 +84,8 @@ def tpfp_imagenet(
     # an indicator of ignored gts
     gt_ignore_inds = np.concatenate(
         (
-            np.zeros(gt_bboxes.shape[0], dtype=np.bool),
-            np.ones(gt_bboxes_ignore.shape[0], dtype=np.bool),
+            np.zeros(gt_bboxes.shape[0], dtype=bool),
+            np.ones(gt_bboxes_ignore.shape[0], dtype=bool),
         )
     )
     # stack gt_bboxes and gt_bboxes_ignore for convenience
@@ -179,8 +179,8 @@ def tpfp_default(
     # an indicator of ignored gts
     gt_ignore_inds = np.concatenate(
         (
-            np.zeros(gt_bboxes.shape[0], dtype=np.bool),
-            np.ones(gt_bboxes_ignore.shape[0], dtype=np.bool),
+            np.zeros(gt_bboxes.shape[0], dtype=bool),
+            np.ones(gt_bboxes_ignore.shape[0], dtype=bool),
         )
     )
     # stack gt_bboxes and gt_bboxes_ignore for convenience
diff --git a/mmdet/core/mask/structures.py b/mmdet/core/mask/structures.py
index 6c8e154..57c0fea 100644
--- a/mmdet/core/mask/structures.py
+++ b/mmdet/core/mask/structures.py
@@ -860,5 +860,5 @@ def polygon_to_bitmap(polygons, height, width):
     """
     rles = maskUtils.frPyObjects(polygons, height, width)
     rle = maskUtils.merge(rles)
-    bitmap_mask = maskUtils.decode(rle).astype(np.bool)
+    bitmap_mask = maskUtils.decode(rle).astype(bool)
     return bitmap_mask
diff --git a/mmdet/models/detectors/pa_predictor.py b/mmdet/models/detectors/pa_predictor.py
index c8130ac..6a1e821 100644
--- a/mmdet/models/detectors/pa_predictor.py
+++ b/mmdet/models/detectors/pa_predictor.py
@@ -51,7 +51,7 @@ def palette2filter(palette, neighbor_sizes=None, bidirection=True):
     """
 
     def generate_cross_filter(number_of_neighbors, dist):
-        cross_filter = np.zeros((number_of_neighbors,) + palette.shape, dtype=np.bool)
+        cross_filter = np.zeros((number_of_neighbors,) + palette.shape, dtype=bool)
         cross_filter[0, dist:, :] = np.logical_or(palette[dist:, :], palette[:-dist, :])
         cross_filter[1, :, dist:] = np.logical_or(palette[:, dist:], palette[:, :-dist])
         cross_filter[2, dist:, dist:] = np.logical_or(
@@ -81,7 +81,7 @@ def generate_cross_filter(number_of_neighbors, dist):
     number_of_span = len(neighbor_sizes)
     number_of_neighbors_per_span = 8 if bidirection else 4
     potential_filter = np.zeros(
-        (number_of_neighbors_per_span * number_of_span,) + palette.shape, dtype=np.bool
+        (number_of_neighbors_per_span * number_of_span,) + palette.shape, dtype=bool
     )
     for neighbor_idx, dist in enumerate(neighbor_sizes):
         offset = neighbor_idx * number_of_neighbors_per_span
@@ -103,7 +103,7 @@ def palette2weight(
     """
 
    def generate_cross_weight(number_of_neighbors, dist):
-        cross_filter = np.zeros((number_of_neighbors,) + palette.shape, dtype=np.bool)
+        cross_filter = np.zeros((number_of_neighbors,) + palette.shape, dtype=bool)
         cross_filter[0, dist:, :] = np.maximum(palette[dist:, :], palette[:-dist, :])
         cross_filter[1, :, dist:] = np.maximum(palette[:, dist:], palette[:, :-dist])
         cross_filter[2, dist:, dist:] = np.maximum(
@@ -152,7 +152,7 @@ def generate_cross_weight(number_of_neighbors, dist):
     number_of_span = len(neighbor_sizes)
     number_of_neighbors_per_span = 8 if bidirection else 4
     potential_weight = np.zeros(
-        (number_of_neighbors_per_span * number_of_span,) + palette.shape, dtype=np.bool
+        (number_of_neighbors_per_span * number_of_span,) + palette.shape, dtype=bool
     )
     for neighbor_idx, dist in enumerate(neighbor_sizes):
         offset = neighbor_idx * number_of_neighbors_per_span
diff --git a/pa_lib/evaluate_helper.py b/pa_lib/evaluate_helper.py
index cb7642a..40c676f 100644
--- a/pa_lib/evaluate_helper.py
+++ b/pa_lib/evaluate_helper.py
@@ -61,8 +61,8 @@ def rank_by_variance(image, masks, weight_by_size=False, nms=1.0):
 def compute_iou(annotation, segmentation, mask_threshold=0.0):
     if type(annotation) == torch.Tensor:
         annotation = annotation.numpy()
-    annotation = annotation.astype(np.bool)
-    segmentation = (segmentation > mask_threshold).astype(np.bool)
+    annotation = annotation.astype(bool)
+    segmentation = (segmentation > mask_threshold).astype(bool)
     if np.isclose(np.sum(annotation), 0) and np.isclose(np.sum(segmentation), 0):
         return 1