报错: 9577 elementwise_op_function.h:1068] Error, broadcast should not into w broadcast
Created by: lxk767363331
在resnet的bottleneck_block中添加cbam但是在ChannelAttention中会报错elementwise_op_function.h:1068] Error, broadcast should not into w broadcast不清楚为什么 。除去插入部分运行无问题,只运行SpatialAttention模块是正常运行的所以定位错误在ChannelAttention中。 代码如下: def sharedMLP(input, num_filters, ratio): conv = fluid.layers.conv2d(input=input, num_filters=num_filters // ratio, filter_size=1, stride=1, padding=0, act="relu") conv = fluid.layers.conv2d(input=conv, num_filters=num_filters, filter_size=1, stride=1, padding=0, act=None) return conv
def ChannelAttention(input, num_filters, ratio=4):
    """CBAM channel attention: sigmoid(MLP(maxpool(x)) + MLP(avgpool(x))).

    Returns a [N, C, 1, 1]-shaped attention map (C == num_filters) to be
    multiplied into the feature map by the caller.
    """
    # Global max / average pooling down to 1x1 spatial size.
    pooled_max = fluid.layers.adaptive_pool2d(input, 1, pool_type='max')
    pooled_avg = fluid.layers.adaptive_pool2d(input, 1, pool_type='avg')
    # Run both pooled descriptors through the MLP branch.
    mlp_max = sharedMLP(pooled_max, num_filters, ratio)
    mlp_avg = sharedMLP(pooled_avg, num_filters, ratio)
    # Fuse the two branches and squash to (0, 1) attention weights.
    return fluid.layers.sigmoid(mlp_avg + mlp_max)
def bottleneck_block(input, num_filters, stride=1, downsample=False, name=None):
    """ResNet bottleneck (1x1 -> 3x3 -> 1x1, 4x channel expansion) with a
    CBAM channel-attention gate on the residual branch before the shortcut
    addition.

    Args:
        input: feature map, presumably NCHW — confirm against the network.
        num_filters: base channel count; the block outputs num_filters * 4.
        stride: stride of the middle 3x3 conv.
        downsample: if True, project the shortcut with a 1x1 conv_bn_layer.
        name: parameter-name prefix for the conv_bn_layer calls.
    """
    residual = input
    conv = conv_bn_layer(
        input=input,
        filter_size=1,
        num_filters=num_filters,
        name=name + '_conv1')
    conv = conv_bn_layer(
        input=conv,
        filter_size=3,
        num_filters=num_filters,
        stride=stride,
        name=name + '_conv2')
    conv = conv_bn_layer(
        input=conv,
        filter_size=1,
        num_filters=num_filters * 4,  # standard ResNet 4x expansion
        if_act=False,
        name=name + '_conv3')
    # Attention map, shape [N, C, 1, 1] with C == num_filters * 4.
    # (The post's "num_filters4" / "conv1conv" lost their '*' to markdown.)
    attention = ChannelAttention(conv, num_filters=num_filters * 4, ratio=4)
    # Multiplying [N,C,H,W] by [N,C,1,1] directly is what raises
    # "broadcast should not into w broadcast" in fluid's elementwise ops:
    # same-rank operands may not broadcast over interior 1-sized dims.
    # Squeeze to [N, C] and broadcast from axis 0 instead (the SE-Net
    # scaling pattern documented for elementwise_mul).
    attention = fluid.layers.squeeze(attention, axes=[2, 3])
    conv = fluid.layers.elementwise_mul(x=conv, y=attention, axis=0)
    if downsample:
        residual = conv_bn_layer(
            input=input,
            filter_size=1,
            num_filters=num_filters * 4,
            if_act=False,
            name=name + '_downsample')
    return fluid.layers.elementwise_add(x=residual, y=conv, act='relu')