
Commit a555707

typo fixes
1 parent 774b1b2 commit a555707

3 files changed (+6 -6 lines changed)


layers.py

+4 -4
@@ -258,12 +258,12 @@ def forward(self, x, gain, bias):
 # Simple function to handle groupnorm norm stylization
 def groupnorm(x, norm_style):
   # If number of channels specified in norm_style:
-  if 'ch' in self.norm_style:
-    ch = int(self.norm_style.split('_')[-1])
+  if 'ch' in norm_style:
+    ch = int(norm_style.split('_')[-1])
     groups = max(int(x.shape[1]) // ch, 1)
   # If number of groups specified in norm style
-  elif 'grp' in self.norm_style:
-    groups = int(self.norm_style.split('_')[-1])
+  elif 'grp' in norm_style:
+    groups = int(norm_style.split('_')[-1])
   # If neither, default to groups = 16
   else:
     groups = 16
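
Note on the layers.py fix: groupnorm is a plain module-level helper (the @@ context only shows the nearest preceding def), so there is no self in scope, and the old 'ch' in self.norm_style / self.norm_style.split(...) lines would raise a NameError as soon as a 'ch_*' or 'grp_*' norm style was selected. Below is a minimal standalone sketch of the corrected parsing logic, assuming norm_style strings of the form 'ch_<n>' or 'grp_<n>'; the trailing F.group_norm call and the usage lines are illustrative assumptions, since the function's return statement is outside this hunk.

import torch
import torch.nn.functional as F

def groupnorm(x, norm_style):
  # 'ch_16' -> aim for roughly 16 channels per group
  if 'ch' in norm_style:
    ch = int(norm_style.split('_')[-1])
    groups = max(int(x.shape[1]) // ch, 1)
  # 'grp_8' -> use exactly 8 groups
  elif 'grp' in norm_style:
    groups = int(norm_style.split('_')[-1])
  # Anything else: default to 16 groups
  else:
    groups = 16
  return F.group_norm(x, groups)

x = torch.randn(2, 64, 8, 8)
print(groupnorm(x, 'ch_16').shape)  # 64 channels / 16 per group -> 4 groups
print(groupnorm(x, 'grp_8').shape)  # exactly 8 groups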

train.py

+1 -1
@@ -210,7 +210,7 @@ def run(config):
       if config['G_eval_mode']:
         print('Switchin G to eval mode...')
         G.eval()
-      train_fns.test(G, D, G_ema, state_dict, config, sample,
+      train_fns.test(G, D, G_ema, z_, y_, state_dict, config, sample,
                      get_inception_metrics, experiment_name, test_log)
     # Increment epoch counter at end of epoch
     state_dict['epoch'] += 1
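
The train.py change is the matching call-site update: once test() gains the two extra positional parameters (see the train_fns.py hunk below), the old call would fail with a TypeError for missing positional arguments. A toy reproduction of that kind of mismatch, using a stand-in function rather than the repo's actual objects:

def test(G, D, G_ema, z_, y_, state_dict, config, sample):
  pass

try:
  # Old-style call with two arguments too few, mirroring a call site
  # that was not updated alongside the widened signature.
  test('G', 'D', 'G_ema', {}, {}, 'sample')
except TypeError as e:
  print(e)  # test() missing 2 required positional arguments: 'config' and 'sample'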

train_fns.py

+1 -1
@@ -157,7 +157,7 @@ def save_and_sample(G, D, G_ema, z_, y_, fixed_z, fixed_y,
     are an improvement over the previous best (either in IS or FID,
     user-specified), logs the results, and saves a best_ copy if it's an
     improvement. '''
-def test(G, D, G_ema, state_dict, config, sample, get_inception_metrics,
+def test(G, D, G_ema, z_, y_, state_dict, config, sample, get_inception_metrics,
          experiment_name, test_log):
   print('Gathering inception metrics...')
   if config['accumulate_stats']:
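
Why test() needs z_ and y_ at all: the trailing context line, if config['accumulate_stats']:, suggests they are used to re-accumulate the generator's standing batch-norm statistics with fresh noise and label samples before computing IS and FID. A self-contained toy sketch of that idea follows; every name in it is a hypothetical stand-in, not the repo's actual utils API.

import torch
import torch.nn as nn

class ToyG(nn.Module):
  # Tiny generator stand-in with one BatchNorm layer whose running stats matter at eval time.
  def __init__(self, dim_z=4, n_classes=10):
    super().__init__()
    self.fc = nn.Linear(dim_z + n_classes, 8)
    self.bn = nn.BatchNorm1d(8)
  def forward(self, z, y_onehot):
    return self.bn(self.fc(torch.cat([z, y_onehot], dim=1)))

def accumulate_standing_stats(net, z_, y_, num_accumulations=16):
  # Run several no-grad forward passes with freshly resampled noise and labels
  # so the BatchNorm running statistics are averaged over many batches before
  # the network is switched to eval mode for metric computation.
  net.train()
  with torch.no_grad():
    for _ in range(num_accumulations):
      z_.normal_()                                       # resample noise in place
      labels = torch.randint(0, y_.shape[1], (z_.shape[0],))
      y_.zero_().scatter_(1, labels.unsqueeze(1), 1.0)   # resample one-hot labels in place
      net(z_, y_)
  net.eval()

G = ToyG()
z_, y_ = torch.empty(32, 4), torch.empty(32, 10)
accumulate_standing_stats(G, z_, y_)
print(G.bn.running_mean)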
