@@ -251,8 +251,8 @@ def run(rank, n_gpus, hps):
     scaler = GradScaler(enabled=hps.train.fp16_run)

-    print("start training from epoch %s" % epoch_str)
     for epoch in range(epoch_str, hps.train.epochs + 1):
+        print("start training from epoch %s" % epoch)
         if rank == 0:
             train_and_evaluate(
                 rank,
                 epoch,
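For reference, the scaler configured above is later threaded into train_and_evaluate. Below is a minimal sketch of the standard torch.cuda.amp scale/step/update loop that such a scaler drives; net_g, optim_g, batch, and the loss computation are hypothetical stand-ins, not this repository's actual code:

    from torch.cuda.amp import autocast

    def train_step(net_g, optim_g, batch, scaler):
        # Hypothetical AMP step, not the repo's train_and_evaluate.
        optim_g.zero_grad()
        # Forward under autocast so eligible ops run in fp16.
        with autocast(enabled=scaler.is_enabled()):
            loss = net_g(batch)
        # Scale the loss so fp16 gradients do not underflow.
        scaler.scale(loss).backward()
        # step() unscales gradients, skips the update on inf/nan grads,
        # then applies the optimizer step.
        scaler.step(optim_g)
        # Adjust the loss scale for the next iteration.
        scaler.update()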
@@ -225,8 +225,8 @@ def run(rank, n_gpus, hps):
         net_d = optim_d = scheduler_d = None
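Once net_d, optim_d, and scheduler_d are set to None in this branch, every later discriminator update has to be skipped explicitly. A sketch of one such guard; the helper name and its arguments are invented for illustration:

    def maybe_step_discriminator(net_d, optim_d, scheduler_d, loss_d):
        # No-op when the discriminator was disabled above
        # (net_d = optim_d = scheduler_d = None).
        if net_d is None:
            return
        optim_d.zero_grad()
        loss_d.backward()
        optim_d.step()
        scheduler_d.step()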