disable consider-using-from-import instead

This commit is contained in:
Jim O'Regan
2024-10-02 14:29:06 +00:00
parent a220f283e3
commit b0ba920dc1
5 changed files with 5 additions and 5 deletions

View File

@@ -2,7 +2,7 @@
 import torch
 import torch.nn.functional as F
-from torch import nn
+import torch.nn as nn # pylint: disable=consider-using-from-import
 from torch.nn import AvgPool1d, Conv1d, Conv2d, ConvTranspose1d
 from torch.nn.utils import remove_weight_norm, spectral_norm, weight_norm

View File

@@ -2,11 +2,11 @@ import math
 from typing import Optional
 import torch
+import torch.nn as nn # pylint: disable=consider-using-from-import
 import torch.nn.functional as F
 from conformer import ConformerBlock
 from diffusers.models.activations import get_activation
 from einops import pack, rearrange, repeat
-from torch import nn
 from matcha.models.components.transformer import BasicTransformerBlock

View File

@@ -3,8 +3,8 @@
 import math
 import torch
+import torch.nn as nn # pylint: disable=consider-using-from-import
 from einops import rearrange
-from torch import nn
 from matcha import utils
 from matcha.utils.model import sequence_mask

View File

@@ -1,6 +1,7 @@
from typing import Any, Dict, Optional from typing import Any, Dict, Optional
import torch import torch
import torch.nn as nn # pylint: disable=consider-using-from-import
from diffusers.models.attention import ( from diffusers.models.attention import (
GEGLU, GEGLU,
GELU, GELU,
@@ -11,7 +12,6 @@ from diffusers.models.attention import (
from diffusers.models.attention_processor import Attention from diffusers.models.attention_processor import Attention
from diffusers.models.lora import LoRACompatibleLinear from diffusers.models.lora import LoRACompatibleLinear
from diffusers.utils.torch_utils import maybe_allow_in_graph from diffusers.utils.torch_utils import maybe_allow_in_graph
from torch import nn
class SnakeBeta(nn.Module): class SnakeBeta(nn.Module):

View File

@@ -4,11 +4,11 @@ import random
 import torch
+import matcha.utils.monotonic_align as monotonic_align # pylint: disable=consider-using-from-import
 from matcha import utils
 from matcha.models.baselightningmodule import BaseLightningClass
 from matcha.models.components.flow_matching import CFM
 from matcha.models.components.text_encoder import TextEncoder
-from matcha.utils import monotonic_align
 from matcha.utils.model import (
     denormalize,
     duration_loss,