提交 82437970 编写于 作者: L Luo Tao

add relu in layer_math.py

上级 ca62c104
...@@ -39,6 +39,7 @@ register_unary_math_op('abs', act.AbsActivation())
register_unary_math_op('sigmoid', act.SigmoidActivation())
register_unary_math_op('tanh', act.TanhActivation())
register_unary_math_op('square', act.SquareActivation())
register_unary_math_op('relu', act.ReluActivation())
def add(layeroutput, other):
......
...@@ -7,8 +7,9 @@ x = layer_math.exp(x)
x = layer_math.log(x)
x = layer_math.abs(x)
x = layer_math.sigmoid(x)
x = layer_math.tanh(x)
x = layer_math.square(x)
x = layer_math.relu(x)
y = 1 + x
y = y + 1
y = x + y
......
...@@ -65,13 +65,28 @@ layers {
}
}
}
layers {
name: "__tanh_0__"
type: "mixed"
size: 100
active_type: "tanh"
inputs {
input_layer_name: "__sigmoid_0__"
proj_conf {
type: "identity"
name: "___tanh_0__.w0"
input_size: 100
output_size: 100
}
}
}
layers {
name: "__square_0__"
type: "mixed"
size: 100
active_type: "square"
inputs {
input_layer_name: "__tanh_0__"
proj_conf {
type: "identity"
name: "___square_0__.w0"
...@@ -81,15 +96,15 @@ layers {
}
}
layers {
name: "__relu_0__"
type: "mixed"
size: 100
active_type: "relu"
inputs {
input_layer_name: "__square_0__"
proj_conf {
type: "identity"
name: "___relu_0__.w0"
input_size: 100
output_size: 100
}
...@@ -101,7 +116,7 @@ layers {
size: 100
active_type: ""
inputs {
input_layer_name: "__relu_0__"
}
slope: 1.0
intercept: 1
...@@ -123,7 +138,7 @@ layers {
size: 100
active_type: ""
inputs {
input_layer_name: "__relu_0__"
proj_conf {
type: "identity"
name: "___mixed_0__.w0"
...@@ -147,7 +162,7 @@ layers {
size: 100
active_type: ""
inputs {
input_layer_name: "__relu_0__"
}
slope: -1.0
intercept: 0.0
...@@ -339,8 +354,9 @@ sub_models {
layer_names: "__log_0__"
layer_names: "__abs_0__"
layer_names: "__sigmoid_0__"
layer_names: "__tanh_0__"
layer_names: "__square_0__"
layer_names: "__relu_0__"
layer_names: "__slope_intercept_layer_0__"
layer_names: "__slope_intercept_layer_1__"
layer_names: "__mixed_0__"
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册