
Commit

fix lint
Mddct committed Oct 20, 2023
1 parent 000c7af commit 6967ef5
Showing 2 changed files with 7 additions and 6 deletions.
wenet/paraformer/experiment/lfr.py: 5 changes (2 additions, 3 deletions)
@@ -54,9 +54,8 @@ def forward(self, input: torch.Tensor,
                              device=input.device,
                              dtype=input_lens.dtype).unsqueeze(0).repeat(
                                  B, 1)  # [B, T_all_max]
-        index_mask = (index <
-                      (self.left_padding_nums + input_lens).unsqueeze(1)
-                      )  #[B, T_all_max]
+        # [B, T_all_max]
+        index_mask = index < (self.left_padding_nums + input_lens).unsqueeze(1)
 
         tail_index_mask = torch.logical_not(
             index >= (T_all.unsqueeze(1))) & index_mask
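For readers skimming the hunk, here is a minimal standalone sketch of the masking pattern that the reformatted line computes. The toy shapes and the scalar `left_padding_nums` below are assumptions for illustration only (in the module it is the attribute `self.left_padding_nums`):

```python
import torch

# Toy setup: batch of 2 utterances, padded to T_all_max = 6 frames.
B, T_all_max = 2, 6
left_padding_nums = 2                  # hypothetical LFR left-context padding
input_lens = torch.tensor([3, 5])      # valid frames per utterance

# Same construction as in the diff: a [B, T_all_max] grid of frame indices.
index = torch.arange(T_all_max,
                     dtype=input_lens.dtype).unsqueeze(0).repeat(B, 1)

# True where the frame index falls inside left padding + valid input.
index_mask = index < (left_padding_nums + input_lens).unsqueeze(1)  # [B, T_all_max]

print(index_mask)
# tensor([[ True,  True,  True,  True,  True, False],
#         [ True,  True,  True,  True,  True,  True]])
```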
wenet/paraformer/experiment/paraformer.py: 8 changes (5 additions, 3 deletions)
@@ -6,12 +6,14 @@
 import torch
 import torchaudio
 import torchaudio.compliance.kaldi as kaldi
-from torch.nn import TransformerDecoder
 import yaml
 from wenet.cif.predictor import Predictor, cif
-from wenet.paraformer.experiment.attention import DummyMultiHeadSANM, MultiHeadAttentionCross, MultiHeadedAttentionSANM
+from wenet.paraformer.experiment.attention import (DummyMultiHeadSANM,
+                                                   MultiHeadAttentionCross,
+                                                   MultiHeadedAttentionSANM)
 from wenet.paraformer.experiment.lfr import LFR
-from wenet.paraformer.experiment.positionwise_feed_forward import PositionwiseFeedForwardDecoderSANM
+from wenet.paraformer.experiment.positionwise_feed_forward import \
+    PositionwiseFeedForwardDecoderSANM
 from wenet.transformer.encoder import BaseEncoder
 from wenet.transformer.cmvn import GlobalCMVN
 from wenet.transformer.decoder import TransformerDecoder
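One note on the deleted `from torch.nn import TransformerDecoder`: the module also imports `TransformerDecoder` from `wenet.transformer.decoder` a few lines later, so the earlier binding is unused and gets shadowed. A linter such as flake8 (assuming that is the linter behind this "fix lint" commit) reports the pattern roughly as in the sketch below:

```python
# Minimal reproduction of the duplicate-import pattern removed by this commit.
from torch.nn import TransformerDecoder            # unused; shadowed below
from wenet.transformer.decoder import TransformerDecoder

# A flake8-style checker would typically flag the second import with something like:
#   F811 redefinition of unused 'TransformerDecoder' from line 2
```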
