mirror of https://github.com/FunAudioLLM/CosyVoice.git
synced 2026-02-04 17:39:25 +08:00
add constant lr scheduler
@@ -715,3 +715,25 @@ class NoamHoldAnnealing(WarmupHoldPolicy):
 
     def set_step(self, step: int):
         self.last_epoch = step
+
+
+class ConstantLR(_LRScheduler):
+    """The ConstantLR scheduler
+
+    This scheduler keeps a constant lr
+
+    """
+
+    def __init__(
+        self,
+        optimizer: torch.optim.Optimizer,
+    ):
+        # super().__init__() must be invoked before setting any fields,
+        # because step() is also invoked inside __init__()
+        super().__init__(optimizer)
+
+    def get_lr(self):
+        return self.base_lrs
+
+    def set_step(self, step: int):
+        self.last_epoch = step
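
For context (not part of the commit), a minimal sketch of how ConstantLR behaves, using the import path that the training code below relies on; the model and optimizer here are placeholders, not names from the repo:

    import torch
    from cosyvoice.utils.scheduler import ConstantLR

    model = torch.nn.Linear(8, 8)  # placeholder model
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-3)
    scheduler = ConstantLR(optimizer)

    for _ in range(3):
        optimizer.step()
        scheduler.step()
        # get_lr() returns base_lrs unchanged, so the lr stays at 1e-3
        print(scheduler.get_last_lr())  # [0.001] on every iteration

    # When resuming from a checkpoint, set_step() realigns the internal
    # counter without replaying individual step() calls.
    scheduler.set_step(10000)
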
@@ -34,7 +34,7 @@ from torch.nn.utils import clip_grad_norm_
 from deepspeed.runtime.zero.stage_1_and_2 import estimate_zero2_model_states_mem_needs_all_live
 
 from cosyvoice.dataset.dataset import Dataset
-from cosyvoice.utils.scheduler import WarmupLR, NoamHoldAnnealing
+from cosyvoice.utils.scheduler import WarmupLR, NoamHoldAnnealing, ConstantLR
 
 
 def init_distributed(args):
@@ -122,6 +122,9 @@ def init_optimizer_and_scheduler(args, configs, model):
     elif configs['train_conf']['scheduler'] == 'NoamHoldAnnealing':
         scheduler_type = NoamHoldAnnealing
         scheduler = NoamHoldAnnealing(optimizer, **configs['train_conf']['scheduler_conf'])
+    elif configs['train_conf']['scheduler'] == 'constantlr':
+        scheduler_type = ConstantLR
+        scheduler = ConstantLR(optimizer)
     else:
         raise ValueError("unknown scheduler: " + configs['train_conf']['scheduler'])
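
Not part of the commit: the branch added above is selected by the scheduler field of train_conf. A sketch of the shape that init_optimizer_and_scheduler reads, limited to the keys visible in this diff (in the repo these fields live in the YAML config; a plain dict is shown here for illustration):

    configs = {
        'train_conf': {
            'scheduler': 'constantlr',  # routes to the new ConstantLR branch
            # 'scheduler_conf' is only consumed by the other schedulers;
            # ConstantLR(optimizer) takes no extra arguments.
        }
    }
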