提交 880774d1 编写于 作者: H Haonan

Change the act.name for LinearActivation() to "linear" so that it won't

fail in hl_activetype; also fix HasInputsSet in submodel
上级 8d4c453b
......@@ -218,7 +218,7 @@ def Inputs(*args):
@config_func
def HasInputsSet():
return len(g_config.model_config.input_layer_names) != 0
return len(g_current_submodel.input_layer_names) != 0
# Define the name of the output layers of the NeuralNetwork.
......@@ -1120,14 +1120,14 @@ def parse_block_expand(block_expand, input_layer_name, block_expand_conf):
block_expand_conf.output_x = 0
else:
block_expand_conf.output_x = cnn_output_size(
block_expand.img_size_x, block_expand.block_x,
block_expand.img_size_x, block_expand.block_x,
block_expand.padding_x, block_expand.stride_x, False)
if block_expand_conf.img_size_y == 0:
block_expand_conf.output_y = 0
else:
block_expand_conf.output_y = cnn_output_size(
block_expand.img_size_y, block_expand.block_y,
block_expand.img_size_y, block_expand.block_y,
block_expand.padding_y, block_expand.stride_y, False)
def parse_maxout(maxout, input_layer_name, maxout_conf):
......@@ -1135,7 +1135,7 @@ def parse_maxout(maxout, input_layer_name, maxout_conf):
maxout_conf.groups = maxout.groups
maxout_conf.img_size_x = maxout.img_size_x
maxout_conf.img_size_y = maxout.img_size_y
# Define an evaluator
@config_func
def Evaluator(
......@@ -1773,7 +1773,7 @@ class MaxOutLayer(LayerBase):
self.config.inputs[0].maxout_conf)
maxout_conf = self.config.inputs[0].maxout_conf
self.set_layer_size(g_layer_map[input_layer.name].size / maxout_conf.groups)
# key: cost type
# value: cost class
g_cost_map = {}
......
......@@ -23,9 +23,9 @@ __all__ = ["TanhActivation", "SigmoidActivation",
class BaseActivation(object):
"""
A mark for activation class.
A mark for activation class.
Each activation inherit BaseActivation, which has two parameters.
:param name: activation name in paddle config.
:type name: basestring
:param support_hppl: True if supported by hppl. HPPL is a library used by paddle
......@@ -104,7 +104,7 @@ class IdentityActivation(BaseActivation):
Just do nothing for output both forward/backward.
"""
def __init__(self): BaseActivation.__init__(self, '', False)
def __init__(self): BaseActivation.__init__(self, 'linear', False)
LinearActivation = IdentityActivation
......@@ -194,7 +194,7 @@ class SquareActivation(BaseActivation):
class ExpActivation(BaseActivation):
"""
Exponential Activation.
.. math::
f(z) = e^z.
"""
......
......@@ -1657,7 +1657,7 @@ def img_pool_layer(input, pool_size, name=None,
:type pool_size_y: int|None
:param num_channels: number of input channel.
:type num_channels: int
:param pool_type: pooling type. MaxPooling or AveragePooling. Default is
:param pool_type: pooling type. MaxPooling or AvgPooling. Default is
MaxPooling.
:type pool_type: BasePoolingType
:param stride: stride width of pooling.
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册