提交 ca843cc5 编写于 作者: 别团等shy哥发育's avatar 别团等shy哥发育

ECA-Net通道注意力机制复现

ECA-ResNet50花朵识别
上级 ac636c7a
...@@ -36,3 +36,4 @@ ...@@ -36,3 +36,4 @@
/经典网络/ShuffleNet/checkpoint/ /经典网络/ShuffleNet/checkpoint/
/经典网络/ShuffleNet/checkpoint_v2/ /经典网络/ShuffleNet/checkpoint_v2/
/经典网络/ResNext/checkpoint/ /经典网络/ResNext/checkpoint/
/经典网络/ECANet/checkpoint/
import math import math
import tensorflow as tf import tensorflow as tf
from keras import backend as K
from keras.layers import (Activation, Add, Concatenate, Conv1D, Conv2D, Dense, from keras.layers import (Activation, Add, Concatenate, Conv1D, Conv2D, Dense,
GlobalAveragePooling2D, GlobalMaxPooling2D, Lambda, BatchNormalization, GlobalAveragePooling2D, GlobalMaxPooling2D, Lambda, BatchNormalization,
Reshape, multiply,Input) Reshape, multiply,Input)
...@@ -10,16 +9,18 @@ from keras.models import Model ...@@ -10,16 +9,18 @@ from keras.models import Model
ECA模块的思想是非常简单的,它去除了原来SE模块中的全连接层, ECA模块的思想是非常简单的,它去除了原来SE模块中的全连接层,
直接在全局平均池化之后的特征上通过一个1D卷积进行学习。 直接在全局平均池化之后的特征上通过一个1D卷积进行学习。
''' '''
def eca_block(input_feature, b=1, gamma=2, name=""): def ECA_Block(input_feature, b=1, gamma=2, name=""):
channel = input_feature._keras_shape[-1] channel = input_feature.shape[-1]
# 根据公式计算自适应卷积核大小
kernel_size = int(abs((math.log(channel, 2) + b) / gamma)) kernel_size = int(abs((math.log(channel, 2) + b) / gamma))
    # 如果kernel_size是奇数就直接使用,否则加1变成奇数                                                           # 如果kernel_size是奇数就直接使用,否则加1变成奇数
kernel_size = kernel_size if kernel_size % 2 else kernel_size + 1 kernel_size = kernel_size if kernel_size % 2 else kernel_size + 1
# [c] # [c]
avg_pool = GlobalAveragePooling2D()(input_feature) x = GlobalAveragePooling2D()(input_feature)
# [c,1] # [c,1]
x = Reshape((-1, 1))(avg_pool) x = Reshape((-1, 1))(x)
# #
x = Conv1D(1, kernel_size=kernel_size, padding="same", name="eca_layer_" + str(name), use_bias=False, )(x) x = Conv1D(1, kernel_size=kernel_size, padding="same",use_bias=False,name="eca_layer_" + str(name))(x)
x = Activation('sigmoid')(x) x = Activation('sigmoid')(x)
# [c,1]=>[1,1,c] # [c,1]=>[1,1,c]
x = Reshape((1, 1, -1))(x) x = Reshape((1, 1, -1))(x)
...@@ -29,6 +30,6 @@ def eca_block(input_feature, b=1, gamma=2, name=""): ...@@ -29,6 +30,6 @@ def eca_block(input_feature, b=1, gamma=2, name=""):
if __name__ == '__main__': if __name__ == '__main__':
inputs=Input([26,26,512]) inputs=Input([26,26,512])
x=eca_block(inputs) x=ECA_Block(inputs)
model=Model(inputs,x) model=Model(inputs,x)
model.summary() model.summary()
\ No newline at end of file
此差异已折叠。
import tensorflow as tf
import math
from tensorflow.keras.layers import GlobalAveragePooling2D,Conv1D,Reshape,Activation,multiply
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
from plot_model import plot_model
'''
ECA模块的思想是非常简单的,它去除了原来SE模块中的全连接层,
直接在全局平均池化之后的特征上通过一个1D卷积进行学习。
'''
def eca_block(input_feature, b=1, gamma=2, name=""):
    """Apply an ECA (Efficient Channel Attention) module to a feature map.

    ECA drops the fully connected layers used by the SE block and instead
    learns channel attention with a single 1D convolution applied to the
    globally average-pooled channel descriptor.

    Args:
        input_feature: 4-D tensor (batch, height, width, channels),
            channels-last layout — assumed from the use of
            GlobalAveragePooling2D; TODO confirm with callers.
        b: additive term in the adaptive kernel-size formula.
        gamma: divisor in the adaptive kernel-size formula.
        name: suffix appended to the Conv1D layer name.

    Returns:
        Tensor with the same shape as ``input_feature``, reweighted
        channel-wise by the learned attention values.
    """
    num_channels = input_feature.shape[-1]
    # Adaptive kernel size k = |(log2(C) + b) / gamma|, forced to be odd
    # so the convolution window is centred on each channel.
    k = int(abs((math.log(num_channels, 2) + b) / gamma))
    if k % 2 == 0:
        k += 1
    # (B, H, W, C) -> (B, C): collapse the spatial dimensions.
    attn = GlobalAveragePooling2D()(input_feature)
    # (B, C) -> (B, C, 1) so Conv1D slides across the channel axis.
    attn = Reshape((-1, 1))(attn)
    # Local cross-channel interaction via one bias-free 1D convolution.
    attn = Conv1D(1, kernel_size=k, padding="same", use_bias=False,
                  name="eca_layer_" + str(name))(attn)
    attn = Activation('sigmoid')(attn)
    # (B, C, 1) -> (B, 1, 1, C) so the weights broadcast over H and W.
    attn = Reshape((1, 1, -1))(attn)
    return multiply([input_feature, attn])
if __name__ == '__main__':
    # Smoke test: wrap a single ECA block in a standalone model,
    # print its layer summary, and render the architecture diagram.
    feature_input = Input([26, 25, 512])
    attended = eca_block(feature_input)
    demo_model = Model(feature_input, attended)
    demo_model.summary()
    plot_model(demo_model, to_file='img/ECANet-block.png')
\ No newline at end of file
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册