Commit 39c4f52

Author: Winter Deng
Message: reformat changed code
Parent: ff9003b (1 parent)

File tree: 3 files changed, +49 / -24 lines

  docs/cli/classifier.py
  docs/cli/genflags.py
  main.py


docs/cli/classifier.py (30 additions, 18 deletions)

```diff
@@ -11,27 +11,37 @@
 
 import subprocess
 
+
 def load_exclude_authors(file_path):
     path = Path(file_path)
     if not path.exists():
         return []
     with open(path, "r", encoding="utf-8") as f:
         return [line.strip() for line in f if line.strip()]
 
+
 def get_last_commit_excluding(exclude_file):
     exclude_authors = set(load_exclude_authors(exclude_file))
-
-    logs = subprocess.check_output([
-        "git", "log",
-        "--pretty=format:%H|%ae",  # commit hash | author email
-    ]).decode("utf-8").splitlines()
-
+
+    logs = (
+        subprocess.check_output(
+            [
+                "git",
+                "log",
+                "--pretty=format:%H|%ae",  # commit hash | author email
+            ]
+        )
+        .decode("utf-8")
+        .splitlines()
+    )
+
     for line in logs:
         commit, email = line.split("|", 1)
         if email not in exclude_authors:
             return commit
     return None
 
+
 def classify_file_category(path):
 
     relative_path = Path(path).relative_to(lib_path)
@@ -52,12 +62,12 @@ def fetch_option_flags(flags):
 
     for flag in flags:
         flag_list.append(
-          {
-              "name": flag["name"].replace("\\", ""),
-              "instruction": flag["name"].split("-")[-1],
-              "description": flag["description"]
-          }
-          )
+            {
+                "name": flag["name"].replace("\\", ""),
+                "instruction": flag["name"].split("-")[-1],
+                "description": flag["description"],
+            }
+        )
 
     return flag_list
 
@@ -66,15 +76,15 @@ def fetch_all_files():
     main_files = [
         os.path.join(lib_path, "main.py"),
         os.path.join(lib_path, "linear_trainer.py"),
-        os.path.join(lib_path, "torch_trainer.py")
+        os.path.join(lib_path, "torch_trainer.py"),
     ]
     lib_files = glob.glob(os.path.join(lib_path, "libmultilabel/**/*.py"), recursive=True)
     file_set = set(map(os.path.abspath, main_files + lib_files))
     return file_set
 
 
 def find_config_usages_in_file(file_path, allowed_keys):
-    pattern = re.compile(r'\bconfig\.([a-zA-Z_][a-zA-Z0-9_]*)')
+    pattern = re.compile(r"\bconfig\.([a-zA-Z_][a-zA-Z0-9_]*)")
     detailed_results = {}
     try:
         with open(file_path, "r", encoding="utf-8") as f:
@@ -114,7 +124,7 @@ def move_duplicates_together(data, keep):
     duplicates = set()
 
     for i, key1 in enumerate(all_keys):
-        for key2 in all_keys[i+1:]:
+        for key2 in all_keys[i + 1 :]:
             duplicates |= data[key1] & data[key2]
 
     data[keep] |= duplicates
@@ -136,7 +146,7 @@ def classify(raw_flags):
     collected = {}
 
     for file_path in file_set:
-        detailed_results  = find_config_usages_in_file(file_path, allowed_keys)
+        detailed_results = find_config_usages_in_file(file_path, allowed_keys)
         if detailed_results:
             usage_map[file_path] = set(detailed_results.keys())
             for k, v in detailed_results.items():
@@ -163,7 +173,9 @@ def classify(raw_flags):
         if flag["category"] not in result:
             result[flag["category"]] = []
 
-        result[flag["category"]].append({"name": flag["name"].replace("--", r"\-\-"), "description": flag["description"]})
+        result[flag["category"]].append(
+            {"name": flag["name"].replace("--", r"\-\-"), "description": flag["description"]}
+        )
 
     result["details"] = []
     for k, v in collected.items():
@@ -172,4 +184,4 @@ def classify(raw_flags):
         for i in v[1:]:
             result["details"].append({"name": "", "file": i["file"], "location": ", ".join(i["lines"])})
 
-    return result
\ No newline at end of file
+    return result
```
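The restyled `subprocess` chain, double quotes, and trailing commas above are consistent with an auto-formatter such as black, though the commit message does not name the tool. For reference, the post-commit helpers run standalone inside a git checkout; a minimal sketch, assuming `Path` comes from a `pathlib` import outside this hunk, with a hypothetical exclude-file name:

```python
import subprocess
from pathlib import Path  # assumed; the import is not visible in this hunk


def load_exclude_authors(file_path):
    # One author email per line; a missing file excludes nobody.
    path = Path(file_path)
    if not path.exists():
        return []
    with open(path, "r", encoding="utf-8") as f:
        return [line.strip() for line in f if line.strip()]


def get_last_commit_excluding(exclude_file):
    exclude_authors = set(load_exclude_authors(exclude_file))

    # One "<hash>|<author email>" entry per commit, newest first.
    logs = (
        subprocess.check_output(["git", "log", "--pretty=format:%H|%ae"])
        .decode("utf-8")
        .splitlines()
    )

    for line in logs:
        commit, email = line.split("|", 1)
        if email not in exclude_authors:
            return commit
    return None


if __name__ == "__main__":
    # ".exclude_authors" is an illustrative file name, not one from the repo.
    print(get_last_commit_excluding(".exclude_authors"))
```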

docs/cli/genflags.py (7 additions, 3 deletions)

```diff
@@ -6,6 +6,7 @@
 import main
 from classifier import classify
 
+
 class FakeParser(dict):
     def __init__(self):
         self.flags = []
@@ -32,9 +33,11 @@ def add_argument(
 
 classified = classify(parser.flags)
 
+
 def width_title(key, title):
     return max(map(lambda f: len(f[key]), classified[title]))
 
+
 def print_table(title, flags, intro):
     print()
     print(intro)
@@ -51,21 +54,22 @@ def print_table(title, flags, intro):
     print("=" * wn, "=" * wd)
     print()
 
+
 print_table(
     "general",
     classified["general"],
     intro="**General options**:\n\
-Common configurations shared across both linear and neural network trainers."
+Common configurations shared across both linear and neural network trainers.",
 )
 print_table(
     "linear",
     classified["linear"],
     intro="**Linear options**:\n\
-Configurations specific to linear trainer."
+Configurations specific to linear trainer.",
 )
 print_table(
     "nn",
     classified["nn"],
     intro="**Neural network options**:\n\
-Configurations specific to torch (neural networks) trainer."
+Configurations specific to torch (neural networks) trainer.",
 )
```
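The `"=" * wn, "=" * wd` separator rows in `print_table` suggest the script emits reStructuredText simple tables for the docs. A hypothetical, self-contained sketch of that output shape (`print_rest_table` and the sample rows are illustrative, not the script's actual code):

```python
def print_rest_table(flags):
    # Column widths track the longest cell, as width_title does above.
    wn = max(len("Name"), *(len(f["name"]) for f in flags))
    wd = max(len("Description"), *(len(f["description"]) for f in flags))
    print("=" * wn, "=" * wd)
    print("Name".ljust(wn), "Description")
    print("=" * wn, "=" * wd)
    for f in flags:
        print(f["name"].ljust(wn), f["description"])
    print("=" * wn, "=" * wd)


print_rest_table(
    [
        {"name": r"\-\-data_name", "description": "Dataset name"},
        {"name": r"\-\-seed", "description": "Random seed"},
    ]
)
```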

main.py (12 additions, 3 deletions)

```diff
@@ -41,7 +41,11 @@ def add_all_arguments(parser):
     parser.add_argument("--checkpoint_path", help="The checkpoint to warm-up with (default: %(default)s)")
 
     # data
-    parser.add_argument("--data_name", default="unnamed_data", help="Dataset name for generating the output directory (default: %(default)s)")
+    parser.add_argument(
+        "--data_name",
+        default="unnamed_data",
+        help="Dataset name for generating the output directory (default: %(default)s)",
+    )
     parser.add_argument("--training_file", help="Path to training data (default: %(default)s)")
     parser.add_argument("--val_file", help="Path to validation data (default: %(default)s)")
     parser.add_argument("--test_file", help="Path to test data (default: %(default)s)")
@@ -104,7 +108,9 @@ def add_all_arguments(parser):
     # pretrained vocab / embeddings
     parser.add_argument("--vocab_file", type=str, help="Path to a file holding vocabuaries (default: %(default)s)")
     parser.add_argument(
-        "--embed_file", type=str, help="Path to a file holding pre-trained embeddings or the name of the pretrained GloVe embedding (default: %(default)s)"
+        "--embed_file",
+        type=str,
+        help="Path to a file holding pre-trained embeddings or the name of the pretrained GloVe embedding (default: %(default)s)",
     )
 
     # train
@@ -235,7 +241,10 @@ def add_all_arguments(parser):
         "--tree_max_depth", type=int, default=10, help="Maximum depth of the tree (default: %(default)s)"
     )
     parser.add_argument(
-        "--tree_ensemble_models", type=int, default=1, help="Number of models in the tree ensemble (default: %(default)s)"
+        "--tree_ensemble_models",
+        type=int,
+        default=1,
+        help="Number of models in the tree ensemble (default: %(default)s)",
     )
     parser.add_argument(
         "--beam_width",
```
