From 9d09fceda38776d8dbc8489292801ea706dda675 Mon Sep 17 00:00:00 2001
From: Xinghai Sun
Date: Mon, 21 Aug 2017 22:00:01 +0800
Subject: [PATCH] Replace activator BRelu with Relu.

---
 deep_speech_2/layer.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/deep_speech_2/layer.py b/deep_speech_2/layer.py
index 1b1a5810..c4055aaa 100644
--- a/deep_speech_2/layer.py
+++ b/deep_speech_2/layer.py
@@ -117,7 +117,7 @@ def conv_group(input, num_stacks):
         num_channels_out=32,
         stride=(2, 2),
         padding=(5, 20),
-        act=paddle.activation.BRelu())
+        act=paddle.activation.Relu())
     for i in xrange(num_stacks - 1):
         conv = conv_bn_layer(
             input=conv,
@@ -126,7 +126,7 @@ def conv_group(input, num_stacks):
             num_channels_out=32,
             stride=(1, 2),
             padding=(5, 10),
-            act=paddle.activation.BRelu())
+            act=paddle.activation.Relu())
     output_num_channels = 32
     output_height = 160 // pow(2, num_stacks) + 1
     return conv, output_num_channels, output_height
@@ -153,13 +153,13 @@ def rnn_group(input, size, num_stacks, use_gru):
                 name=str(i),
                 input=output,
                 size=size,
-                act=paddle.activation.BRelu())
+                act=paddle.activation.Relu())
         else:
             output = bidirectional_simple_rnn_bn_layer(
                 name=str(i),
                 input=output,
                 size=size,
-                act=paddle.activation.BRelu())
+                act=paddle.activation.Relu())
     return output
-- 
GitLab