Mirror of https://github.com/FunAudioLLM/CosyVoice.git, synced 2026-02-04 17:39:25 +08:00
remove unnecessary f0 loss in discriminator
@@ -95,6 +95,8 @@ def main():
         override_dict.pop('hift')
     with open(args.config, 'r') as f:
         configs = load_hyperpyyaml(f, overrides=override_dict)
+    if gan is True:
+        configs['train_conf'] = configs['train_conf_gan']
     configs['train_conf'].update(vars(args))
 
     # Init env for ddp
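The two added lines above (this hunk appears to be in cosyvoice/bin/train.py) make GAN runs reuse the ordinary train_conf lookup path: right after the YAML is parsed, the GAN-specific section simply replaces train_conf. A minimal sketch of the effect, using plain dicts in place of the parsed hyperpyyaml config and hypothetical values:

gan = True
# plain dicts stand in for the parsed hyperpyyaml config; the lr values are hypothetical
configs = {
    'train_conf': {'optim': 'adam', 'optim_conf': {'lr': 1e-3}},
    'train_conf_gan': {'optim': 'adam', 'optim_conf': {'lr': 2e-4}, 'optim_d': 'adam'},
}
if gan is True:
    # same two lines the hunk adds: GAN runs alias train_conf to train_conf_gan once,
    # so all later code can keep reading configs['train_conf']
    configs['train_conf'] = configs['train_conf_gan']
assert configs['train_conf']['optim_conf']['lr'] == 2e-4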
@@ -64,6 +64,5 @@ class HiFiGan(nn.Module):
             loss_tpr = tpr_loss(y_d_rs, y_d_gs, self.tpr_loss_tau)
         else:
             loss_tpr = torch.zeros(1).to(device)
-        loss_f0 = F.l1_loss(generated_f0, pitch_feat)
-        loss = loss_disc + self.tpr_loss_weight * loss_tpr + loss_f0
-        return {'loss': loss, 'loss_disc': loss_disc, 'loss_tpr': loss_tpr}
+        loss = loss_disc + self.tpr_loss_weight * loss_tpr
+        return {'loss': loss, 'loss_disc': loss_disc, 'loss_tpr': loss_tpr, 'loss_f0': loss_f0}
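This hunk (apparently cosyvoice/hifigan/hifigan.py) drops the F0 L1 term from the discriminator objective, leaving only the adversarial loss plus the weighted TPR loss. A standalone sketch of the new combination, with toy tensors standing in for the real discriminator_loss/tpr_loss outputs:

import torch

def combine_discriminator_losses(loss_disc, loss_tpr, tpr_loss_weight):
    # after this commit the total is the adversarial term plus the weighted TPR term;
    # the extra F.l1_loss(generated_f0, pitch_feat) term is no longer added
    return loss_disc + tpr_loss_weight * loss_tpr

loss_disc = torch.tensor(1.25)   # toy stand-in for discriminator_loss(...)
loss_tpr = torch.zeros(1)        # toy stand-in for tpr_loss(...) when the weight is 0
loss = combine_discriminator_losses(loss_disc, loss_tpr, tpr_loss_weight=1.0)
result = {'loss': loss, 'loss_disc': loss_disc, 'loss_tpr': loss_tpr}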
@@ -110,30 +110,29 @@ def wrap_cuda_model(args, model):
 
 
 def init_optimizer_and_scheduler(args, configs, model, gan):
-    key = 'train_conf_gan' if gan is True else 'train_conf'
-    if configs[key]['optim'] == 'adam':
-        optimizer = optim.Adam(model.parameters(), **configs[key]['optim_conf'])
-    elif configs[key]['optim'] == 'adamw':
-        optimizer = optim.AdamW(model.parameters(), **configs[key]['optim_conf'])
+    if configs['train_conf']['optim'] == 'adam':
+        optimizer = optim.Adam(model.parameters(), **configs['train_conf']['optim_conf'])
+    elif configs['train_conf']['optim'] == 'adamw':
+        optimizer = optim.AdamW(model.parameters(), **configs['train_conf']['optim_conf'])
     else:
-        raise ValueError("unknown optimizer: " + configs[key])
+        raise ValueError("unknown optimizer: " + configs['train_conf'])
 
-    if configs[key]['scheduler'] == 'warmuplr':
+    if configs['train_conf']['scheduler'] == 'warmuplr':
         scheduler_type = WarmupLR
-        scheduler = WarmupLR(optimizer, **configs[key]['scheduler_conf'])
-    elif configs[key]['scheduler'] == 'NoamHoldAnnealing':
+        scheduler = WarmupLR(optimizer, **configs['train_conf']['scheduler_conf'])
+    elif configs['train_conf']['scheduler'] == 'NoamHoldAnnealing':
         scheduler_type = NoamHoldAnnealing
-        scheduler = NoamHoldAnnealing(optimizer, **configs[key]['scheduler_conf'])
-    elif configs[key]['scheduler'] == 'constantlr':
+        scheduler = NoamHoldAnnealing(optimizer, **configs['train_conf']['scheduler_conf'])
+    elif configs['train_conf']['scheduler'] == 'constantlr':
         scheduler_type = ConstantLR
         scheduler = ConstantLR(optimizer)
     else:
-        raise ValueError("unknown scheduler: " + configs[key])
+        raise ValueError("unknown scheduler: " + configs['train_conf'])
 
     # use deepspeed optimizer for speedup
     if args.train_engine == "deepspeed":
         def scheduler(opt):
-            return scheduler_type(opt, **configs[key]['scheduler_conf'])
+            return scheduler_type(opt, **configs['train_conf']['scheduler_conf'])
         model, optimizer, _, scheduler = deepspeed.initialize(
             args=args,
             model=model,
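With train_conf now aliased to train_conf_gan up front, the key-switching line is gone and every lookup reads configs['train_conf'] directly (this and the next hunk appear to be in cosyvoice/utils/train_utils.py). A self-contained sketch of the simplified optimizer selection, using a toy nn.Linear and hypothetical config values:

from torch import nn, optim

# hypothetical config values; WarmupLR and the other repo schedulers are omitted here
configs = {'train_conf': {'optim': 'adamw', 'optim_conf': {'lr': 1e-4}}}
model = nn.Linear(4, 4)  # toy model

if configs['train_conf']['optim'] == 'adam':
    optimizer = optim.Adam(model.parameters(), **configs['train_conf']['optim_conf'])
elif configs['train_conf']['optim'] == 'adamw':
    optimizer = optim.AdamW(model.parameters(), **configs['train_conf']['optim_conf'])
else:
    # str() used here so this toy error message concatenates cleanly
    raise ValueError("unknown optimizer: " + str(configs['train_conf']))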
@@ -143,24 +142,24 @@ def init_optimizer_and_scheduler(args, configs, model, gan):
 
     # currently we wrap generator and discriminator in one model, so we cannot use deepspeed
     if gan is True:
-        if configs[key]['optim_d'] == 'adam':
-            optimizer_d = optim.Adam(model.module.discriminator.parameters(), **configs[key]['optim_conf'])
-        elif configs[key]['optim_d'] == 'adamw':
-            optimizer_d = optim.AdamW(model.module.discriminator.parameters(), **configs[key]['optim_conf'])
+        if configs['train_conf']['optim_d'] == 'adam':
+            optimizer_d = optim.Adam(model.module.discriminator.parameters(), **configs['train_conf']['optim_conf'])
+        elif configs['train_conf']['optim_d'] == 'adamw':
+            optimizer_d = optim.AdamW(model.module.discriminator.parameters(), **configs['train_conf']['optim_conf'])
         else:
-            raise ValueError("unknown optimizer: " + configs[key])
+            raise ValueError("unknown optimizer: " + configs['train_conf'])
 
-        if configs[key]['scheduler_d'] == 'warmuplr':
+        if configs['train_conf']['scheduler_d'] == 'warmuplr':
             scheduler_type = WarmupLR
-            scheduler_d = WarmupLR(optimizer_d, **configs[key]['scheduler_conf'])
-        elif configs[key]['scheduler_d'] == 'NoamHoldAnnealing':
+            scheduler_d = WarmupLR(optimizer_d, **configs['train_conf']['scheduler_conf'])
+        elif configs['train_conf']['scheduler_d'] == 'NoamHoldAnnealing':
             scheduler_type = NoamHoldAnnealing
-            scheduler_d = NoamHoldAnnealing(optimizer_d, **configs[key]['scheduler_conf'])
-        elif configs[key]['scheduler'] == 'constantlr':
+            scheduler_d = NoamHoldAnnealing(optimizer_d, **configs['train_conf']['scheduler_conf'])
+        elif configs['train_conf']['scheduler'] == 'constantlr':
             scheduler_type = ConstantLR
             scheduler_d = ConstantLR(optimizer_d)
         else:
-            raise ValueError("unknown scheduler: " + configs[key])
+            raise ValueError("unknown scheduler: " + configs['train_conf'])
     else:
         optimizer_d, scheduler_d = None, None
     return model, optimizer, scheduler, optimizer_d, scheduler_d
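The GAN branch builds a second optimizer over only the discriminator parameters (model.module.discriminator, because the model is DDP-wrapped at this point). A toy sketch of that split, with a hypothetical HiFiGanToy module and a hypothetical lr standing in for the real HiFiGan and its config:

from torch import nn, optim

class HiFiGanToy(nn.Module):
    # hypothetical stand-in for the real generator/discriminator wrapper
    def __init__(self):
        super().__init__()
        self.generator = nn.Linear(8, 8)
        self.discriminator = nn.Linear(8, 1)

model = HiFiGanToy()
optim_conf = {'lr': 2e-4}  # hypothetical value
optimizer = optim.AdamW(model.parameters(), **optim_conf)                   # generator + discriminator
optimizer_d = optim.AdamW(model.discriminator.parameters(), **optim_conf)   # discriminator only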