import os
import zfpy
import copy
- import zipfile
- import numpy as np
- import io
+
+
+ def zfp_compress(model, name="generic_model_type", tolerance=1e-3):
+     zfp_model = copy.deepcopy(model)
+
+     org_size = save_pkl(f'models/{name}.pkl', model.state_dict())
+     print(f"Original Size: {org_size}")
+
+     compressed_params = apply_zfp(model, tolerance)
+     compressed_size = save_pkl(f"models/{name}.pkl", compressed_params)
+     print(f"Compressed Size: {compressed_size}")
+
+     load_and_decompress(f"models/{name}.pkl", zfp_model)
+
+     return zfp_model, org_size, compressed_size
+
+
+ # HELPERS

def save_pkl(file: str, state_dict: dict) -> float:
    with open(file, 'wb') as f:
@@ -35,43 +50,4 @@ def load_and_decompress(file: str, model) -> None:
        decompressed = zfpy.decompress_numpy(params[name])
        param.data = torch.tensor(decompressed).to(device)

- def zfp_compress(model, name, tolerance=1e-3):
-     zfp_model = copy.deepcopy(model)
-
-     org_size = save_pkl(f'models/{name}.pkl', model.state_dict())
-     print(f"Original Size: {org_size}")
-
-     compressed_params = apply_zfp(model, tolerance)
-     compressed_size = save_pkl(f"models/{name}.pkl", compressed_params)
-     print(f"Compressed Size: {compressed_size}")
-
-     load_and_decompress(f"models/{name}.pkl", zfp_model)
-
-     return zfp_model, org_size, compressed_size
-
-
-
-
- # def apply_zip(model):
- #     params = dict()
- #     for name, param in model.named_parameters():
- #         if param.requires_grad:
- #             param_cpu = param.cpu().detach().numpy()
-
- #             array_buffer = io.BytesIO()
- #             np.save(array_buffer, param_cpu)
-
-
- #             # params[name] = zfpy.compress_numpy(param_cpu, tolerance=tolerance)
- #     return params
-
- # def zip_compress(model, name):
- #     zip_model = copy.deepcopy(model)
-
- #     pkl_path = f'models/{name}.pkl'
- #     org_size = save_pkl(pkl_path, model.state_dict())
-
-
-

-
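For context, a minimal sketch of how the relocated `zfp_compress` entry point might be exercised. It assumes this file is importable (the module name `zfp_utils` is hypothetical), that the helpers it calls (`apply_zfp`, `save_pkl`, `load_and_decompress`) are defined alongside it, and that a `models/` directory exists; the tiny MLP is purely illustrative.

```python
# Hypothetical caller of zfp_compress; module name and model are illustrative.
import os
import torch.nn as nn

from zfp_utils import zfp_compress  # hypothetical module name for this file

os.makedirs("models", exist_ok=True)  # zfp_compress writes to models/<name>.pkl

model = nn.Sequential(nn.Linear(128, 64), nn.ReLU(), nn.Linear(64, 10))

zfp_model, org_size, compressed_size = zfp_compress(model, name="tiny_mlp", tolerance=1e-3)
print(f"Compression ratio: {org_size / compressed_size:.2f}x")
```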
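The lossy step underneath `apply_zfp` / `load_and_decompress` is zfpy's tolerance (fixed-accuracy) mode, sketched below on a standalone array; the array shape and the 1e-3 tolerance are illustrative choices, not values prescribed by this change.

```python
# Round-trip a single array through zfpy's fixed-accuracy (tolerance) mode.
import numpy as np
import zfpy

weights = np.random.randn(64, 64)  # float64 array standing in for a parameter tensor

compressed = zfpy.compress_numpy(weights, tolerance=1e-3)  # lossy, per-element error bounded by the tolerance
restored = zfpy.decompress_numpy(compressed)               # back to a numpy array of the same shape/dtype

print(f"raw bytes:        {weights.nbytes}")
print(f"compressed bytes: {len(compressed)}")
print(f"max abs error:    {np.abs(weights - restored).max():.2e}")
```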