From 0cd30e48a9e913a4f1238c23647d38b614be0cc8 Mon Sep 17 00:00:00 2001
From: Hui Zhang
Date: Sun, 24 Oct 2021 13:58:45 +0000
Subject: [PATCH] fix no_pos call

---
 deepspeech/models/lm/transformer.py | 3 +--
 deepspeech/modules/encoder.py       | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/deepspeech/models/lm/transformer.py b/deepspeech/models/lm/transformer.py
index 9392a15e..dcae4ea0 100644
--- a/deepspeech/models/lm/transformer.py
+++ b/deepspeech/models/lm/transformer.py
@@ -45,8 +45,7 @@ class TransformerLM(nn.Layer, LMInterface, BatchScorerInterface):
         if pos_enc == "sinusoidal":
             pos_enc_layer_type = "abs_pos"
         elif pos_enc is None:
-            #TODO
-            pos_enc_layer_type = "None"
+            pos_enc_layer_type = "no_pos"
         else:
             raise ValueError(f"unknown pos-enc option: {pos_enc}")
 
diff --git a/deepspeech/modules/encoder.py b/deepspeech/modules/encoder.py
index bbe822af..a9b5e8a6 100644
--- a/deepspeech/modules/encoder.py
+++ b/deepspeech/modules/encoder.py
@@ -384,7 +384,7 @@ class TransformerEncoder(BaseEncoder):
 
         Args:
             xs (paddle.Tensor): Input tensor. (B, T, D)
-            masks (paddle.Tensor): Mask tensor. (B, 1, T)
+            masks (paddle.Tensor): Mask tensor. (B, T, T)
             cache (List[paddle.Tensor]): List of cache tensors.
 
         Returns:
-- 
GitLab