From 4ad12a0bd51caab18f22561a44a4346bf215f860 Mon Sep 17 00:00:00 2001
From: ranqiu
Date: Mon, 23 Oct 2017 19:46:21 +0800
Subject: [PATCH] Fix bugs of dot-product attention

---
 python/paddle/trainer_config_helpers/networks.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/python/paddle/trainer_config_helpers/networks.py b/python/paddle/trainer_config_helpers/networks.py
index 120c9d11a5..3821d075cb 100644
--- a/python/paddle/trainer_config_helpers/networks.py
+++ b/python/paddle/trainer_config_helpers/networks.py
@@ -1457,11 +1457,13 @@ def dot_product_attention(encoded_sequence,
 
     expanded = expand_layer(
         input=transformed_state,
-        expanded_as=encoded_sequence,
+        expand_as=encoded_sequence,
         name='%s_expand' % name)
 
     m = linear_comb_layer(
-        weights=expanded, vectors=encoded_sequence, name='%s_dot-product')
+        weights=expanded,
+        vectors=encoded_sequence,
+        name='%s_dot-product' % name)
 
     attention_weight = fc_layer(
         input=m,
--
GitLab
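
Note on the patch: it fixes two call-site bugs in dot_product_attention, passing the correct keyword argument name expand_as to expand_layer and applying the missing "% name" substitution in the linear_comb_layer name. For orientation, the layers touched here compute standard dot-product attention: the transformed decoder state is expanded over the encoder time steps, dotted with each encoder output, and the resulting scores are fed to an fc_layer to produce attention weights. Below is a minimal NumPy sketch of that computation; it is illustrative only, not the PaddlePaddle v1 API, and the function name and shapes are assumptions.

import numpy as np

def dot_product_attention_sketch(encoded_sequence, transformed_state):
    """Illustrative NumPy analogue of the layers touched by this patch.

    encoded_sequence: (seq_len, hidden) encoder outputs.
    transformed_state: (hidden,) decoder state after its fc transform.
    """
    seq_len = encoded_sequence.shape[0]
    # expand_layer(expand_as=encoded_sequence): repeat the state per encoder step
    expanded = np.tile(transformed_state, (seq_len, 1))
    # linear_comb_layer: per-step dot product between state and encoder output
    scores = np.sum(expanded * encoded_sequence, axis=1)
    # softmax over the sequence turns scores into attention weights
    weights = np.exp(scores - scores.max())
    weights /= weights.sum()
    # weighted sum of encoder outputs gives the context vector
    return weights @ encoded_sequence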