 
 import subprocess
 
+
 def load_exclude_authors(file_path):
     path = Path(file_path)
     if not path.exists():
         return []
     with open(path, "r", encoding="utf-8") as f:
         return [line.strip() for line in f if line.strip()]
 
+
 def get_last_commit_excluding(exclude_file):
     exclude_authors = set(load_exclude_authors(exclude_file))
-
-    logs = subprocess.check_output([
-        "git", "log",
-        "--pretty=format:%H|%ae",  # commit hash | author email
-    ]).decode("utf-8").splitlines()
-
+
+    logs = (
+        subprocess.check_output(
+            [
+                "git",
+                "log",
+                "--pretty=format:%H|%ae",  # commit hash | author email
+            ]
+        )
+        .decode("utf-8")
+        .splitlines()
+    )
+
     for line in logs:
         commit, email = line.split("|", 1)
         if email not in exclude_authors:
             return commit
     return None
 
+
 def classify_file_category(path):
 
     relative_path = Path(path).relative_to(lib_path)
@@ -52,12 +62,12 @@ def fetch_option_flags(flags):
 
     for flag in flags:
         flag_list.append(
-            {
-                "name": flag["name"].replace("\\", ""),
-                "instruction": flag["name"].split("-")[-1],
-                "description": flag["description"]
-            }
-        )
+            {
+                "name": flag["name"].replace("\\", ""),
+                "instruction": flag["name"].split("-")[-1],
+                "description": flag["description"],
+            }
+        )
 
     return flag_list
 
@@ -66,15 +76,15 @@ def fetch_all_files():
     main_files = [
         os.path.join(lib_path, "main.py"),
         os.path.join(lib_path, "linear_trainer.py"),
-        os.path.join(lib_path, "torch_trainer.py")
+        os.path.join(lib_path, "torch_trainer.py"),
     ]
     lib_files = glob.glob(os.path.join(lib_path, "libmultilabel/**/*.py"), recursive=True)
     file_set = set(map(os.path.abspath, main_files + lib_files))
     return file_set
 
 
 def find_config_usages_in_file(file_path, allowed_keys):
-    pattern = re.compile(r'\bconfig\.([a-zA-Z_][a-zA-Z0-9_]*)')
+    pattern = re.compile(r"\bconfig\.([a-zA-Z_][a-zA-Z0-9_]*)")
     detailed_results = {}
     try:
         with open(file_path, "r", encoding="utf-8") as f:
@@ -114,7 +124,7 @@ def move_duplicates_together(data, keep):
     duplicates = set()
 
     for i, key1 in enumerate(all_keys):
-        for key2 in all_keys[i + 1:]:
+        for key2 in all_keys[i + 1 :]:
             duplicates |= data[key1] & data[key2]
 
     data[keep] |= duplicates
@@ -136,7 +146,7 @@ def classify(raw_flags):
     collected = {}
 
     for file_path in file_set:
-        detailed_results = find_config_usages_in_file(file_path, allowed_keys)
+        detailed_results = find_config_usages_in_file(file_path, allowed_keys)
         if detailed_results:
             usage_map[file_path] = set(detailed_results.keys())
             for k, v in detailed_results.items():
@@ -163,7 +173,9 @@ def classify(raw_flags):
         if flag["category"] not in result:
             result[flag["category"]] = []
 
-        result[flag["category"]].append({"name": flag["name"].replace("--", r"\-\-"), "description": flag["description"]})
+        result[flag["category"]].append(
+            {"name": flag["name"].replace("--", r"\-\-"), "description": flag["description"]}
+        )
 
     result["details"] = []
     for k, v in collected.items():
@@ -172,4 +184,4 @@ def classify(raw_flags):
         for i in v[1:]:
             result["details"].append({"name": "", "file": i["file"], "location": ", ".join(i["lines"])})
 
-    return result
+    return result