## Ultra-High-Definition Low-Light Image Enhancement: A Benchmark and Transformer-Based Method
## Tao Wang, Kaihao Zhang, Tianrun Shen, Wenhan Luo, Bjorn Stenger, Tong Lu
## https://arxiv.org/pdf/2212.11548.pdf
import glob
import os
import time
from collections import OrderedDict
import numpy as np
import torch
import cv2
import argparse
from natsort import natsort
from skimage.metrics import structural_similarity as ssim
from skimage.metrics import peak_signal_noise_ratio as psnr
import lpips
class Measure():
    def __init__(self, net='alex', use_gpu=False):
        self.device = 'cuda' if use_gpu else 'cpu'
        self.model = lpips.LPIPS(net=net)
        self.model.to(self.device)

    def measure(self, imgA, imgB):
        # Returns [PSNR, SSIM, LPIPS] for a pair of HxWx3 uint8 RGB images.
        return [float(f(imgA, imgB)) for f in [self.psnr, self.ssim, self.lpips]]

    def lpips(self, imgA, imgB, model=None):
        tA = t(imgA).to(self.device)
        tB = t(imgB).to(self.device)
        dist01 = self.model.forward(tA, tB).item()
        return dist01

    def ssim(self, imgA, imgB):
        # channel_axis=-1: treat the last dimension as channels; SSIM is computed
        # independently per channel and averaged (replaces the deprecated multichannel=True).
        score, diff = ssim(imgA, imgB, full=True, channel_axis=-1)
        return score

    def psnr(self, imgA, imgB):
        psnr_val = psnr(imgA, imgB)
        return psnr_val
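
# A minimal usage sketch of Measure on a single image pair (the file names below are
# placeholders, not files shipped with this repo). The values come back in the order
# PSNR, SSIM, LPIPS, matching measure():
#
#   m = Measure(net='alex', use_gpu=False)
#   psnr_val, ssim_val, lpips_val = m.measure(imread('pred.png'), imread('gt.png'))
#   print(format_result(psnr_val, ssim_val, lpips_val))
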
def t(img):
    """Convert an HxWxC uint8 RGB image to a 1xCxHxW float tensor in [-1, 1], the LPIPS input range."""
    def to_4d(img):
        assert len(img.shape) == 3
        assert img.dtype == np.uint8
        img_new = np.expand_dims(img, axis=0)
        assert len(img_new.shape) == 4
        return img_new

    def to_CHW(img):
        return np.transpose(img, [2, 0, 1])

    def to_tensor(img):
        return torch.Tensor(img)

    return to_tensor(to_4d(to_CHW(img))) / 127.5 - 1
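
# Sanity check for t(), assuming an H x W x 3 uint8 RGB input: the result is a
# 1 x 3 x H x W float tensor rescaled from [0, 255] to [-1, 1].
#
#   x = np.zeros((8, 8, 3), dtype=np.uint8)
#   t(x).shape         # torch.Size([1, 3, 8, 8])
#   float(t(x).min())  # -1.0
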
def fiFindByWildcard(wildcard):
    return natsort.natsorted(glob.glob(wildcard, recursive=True))


def imread(path):
    return cv2.imread(path)[:, :, [2, 1, 0]]


def format_result(psnr, ssim, lpips):
    return f'{psnr:0.2f}, {ssim:0.3f}, {lpips:0.3f}'
def measure_dirs(dirA, dirB, use_gpu, ext='png', verbose=False):
    if verbose:
        vprint = lambda x: print(x)
    else:
        vprint = lambda x: None

    t_init = time.time()

    paths_A = fiFindByWildcard(os.path.join(dirA, f'*.{ext}'))
    paths_B = fiFindByWildcard(os.path.join(dirB, f'*.{ext}'))

    vprint("Comparing: ")
    vprint(dirA)
    vprint(dirB)

    measure = Measure(use_gpu=use_gpu)

    results = []
    for pathA, pathB in zip(paths_A, paths_B):
        result = OrderedDict()

        t0 = time.time()
        result['psnr'], result['ssim'], result['lpips'] = measure.measure(imread(pathA), imread(pathB))
        d = time.time() - t0

        vprint(f"{os.path.basename(pathA)}, {os.path.basename(pathB)}, {format_result(**result)}, {d:0.1f}")
        results.append(result)

    psnr = np.mean([result['psnr'] for result in results])
    ssim = np.mean([result['ssim'] for result in results])
    lpips = np.mean([result['lpips'] for result in results])

    vprint(f"Final Result: {format_result(psnr, ssim, lpips)}, {time.time() - t_init:0.1f}s")
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('-dirA', default='./datasets/LOL/test/high/', type=str)
    parser.add_argument('-dirB', default='./results/LOL/', type=str)
    parser.add_argument('-type', default='png')
    # Parse --use_gpu as a real boolean so that "--use_gpu False" is not silently truthy.
    parser.add_argument('--use_gpu', default=True,
                        type=lambda v: str(v).lower() in ('1', 'true', 'yes'))
    args = parser.parse_args()

    if len(args.dirA) > 0 and len(args.dirB) > 0:
        measure_dirs(args.dirA, args.dirB, use_gpu=args.use_gpu, ext=args.type, verbose=True)
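
# Example invocations (a sketch using the defaults above; point -dirA at the ground-truth
# folder and -dirB at the enhancement results to be scored):
#   python evaluation.py -dirA ./datasets/LOL/test/high/ -dirB ./results/LOL/ -type png
#   python evaluation.py -dirA ./datasets/LOL/test/high/ -dirB ./results/LOL/ --use_gpu False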