[pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
This commit is contained in:
pre-commit-ci[bot]
2024-10-02 13:21:37 +00:00
parent bd058a68f7
commit c2569a1018
5 changed files with 5 additions and 5 deletions

View File

@@ -1,8 +1,8 @@
""" from https://github.com/jik876/hifi-gan """ """ from https://github.com/jik876/hifi-gan """
import torch import torch
from torch import nn
import torch.nn.functional as F import torch.nn.functional as F
from torch import nn
from torch.nn import AvgPool1d, Conv1d, Conv2d, ConvTranspose1d from torch.nn import AvgPool1d, Conv1d, Conv2d, ConvTranspose1d
from torch.nn.utils import remove_weight_norm, spectral_norm, weight_norm from torch.nn.utils import remove_weight_norm, spectral_norm, weight_norm

View File

@@ -2,11 +2,11 @@ import math
from typing import Optional
import torch
from torch import nn
import torch.nn.functional as F
from conformer import ConformerBlock
from diffusers.models.activations import get_activation
from einops import pack, rearrange, repeat
from torch import nn
from matcha.models.components.transformer import BasicTransformerBlock

View File

@@ -3,8 +3,8 @@
import math
import torch
from torch import nn
from einops import rearrange
from torch import nn
from matcha import utils
from matcha.utils.model import sequence_mask

View File

@@ -1,7 +1,6 @@
from typing import Any, Dict, Optional
import torch
from torch import nn
from diffusers.models.attention import (
    GEGLU,
    GELU,
@@ -12,6 +11,7 @@ from diffusers.models.attention import (
from diffusers.models.attention_processor import Attention
from diffusers.models.lora import LoRACompatibleLinear
from diffusers.utils.torch_utils import maybe_allow_in_graph
from torch import nn
class SnakeBeta(nn.Module):

View File

@@ -4,11 +4,11 @@ import random
import torch
from matcha.utils import monotonic_align
from matcha import utils
from matcha.models.baselightningmodule import BaseLightningClass
from matcha.models.components.flow_matching import CFM
from matcha.models.components.text_encoder import TextEncoder
from matcha.utils import monotonic_align
from matcha.utils.model import (
    denormalize,
    duration_loss,