From 332194c88132cdef60f6f2a14bb96d5f693ab279 Mon Sep 17 00:00:00 2001
From: Haichao-Zhang
Date: Fri, 23 Sep 2016 18:15:37 -0700
Subject: [PATCH] add type compatible check for ParamAttr (#113)

* add type compatible check for ParamAttr
---
 python/paddle/trainer_config_helpers/attrs.py | 52 ++++++++++++++++---
 .../tests/layers_test_config.py               | 14 ++++-
 2 files changed, 58 insertions(+), 8 deletions(-)

diff --git a/python/paddle/trainer_config_helpers/attrs.py b/python/paddle/trainer_config_helpers/attrs.py
index 7b0a398d19..2b5b451edd 100644
--- a/python/paddle/trainer_config_helpers/attrs.py
+++ b/python/paddle/trainer_config_helpers/attrs.py
@@ -17,6 +17,42 @@
 __all__ = ['ParamAttr', 'ExtraAttr', 'ParameterAttribute',
            'ExtraLayerAttribute']
 
+
+def convert_and_compare(x, Type):
+    """
+    Convert x to the target Type and back again, then check whether the
+    round trip lost any information.
+
+    :param x: object to be checked
+    :param Type: target type to check x against
+    """
+    return type(x)(Type(x)) == x
+
+
+def is_compatible_with(x, Type):
+    """
+    Check whether x has a type compatible with Type.
+
+    :param x: object to be checked
+    :param Type: target type to check x against
+    """
+    if type(x) == Type:
+        return True
+    try:
+        if float == Type or int == Type:
+            # Reject types that can be converted to float/int but rarely
+            # meaningfully: str and bool values should not be used to
+            # initialize a float/int variable.
+            if not isinstance(x, str) and not isinstance(x, bool):
+                return convert_and_compare(x, Type)
+        elif bool == Type:
+            # A string should not be used to initialize a bool variable.
+            if not isinstance(x, str):
+                return convert_and_compare(x, Type)
+        else:
+            return False
+    except:
+        return False
+
+
 class ParameterAttribute(object):
     """
     Parameter Attributes object.
     To fine-tuning network training process, user
@@ -65,14 +101,18 @@ class ParameterAttribute(object):
         elif initial_std is None and initial_mean is None and initial_max \
                 is None and initial_min is None:
             self.attr = {'initial_smart': True}
-        elif isinstance(initial_std, float) or isinstance(initial_mean, float):
+        elif is_compatible_with(initial_std, float) or \
+                is_compatible_with(initial_mean, float):
             self.attr = dict()
             if initial_std is not None:
                 self.attr['initial_std'] = initial_std
             if initial_mean is not None:
                 self.attr['initial_mean'] = initial_mean
             self.attr['initial_strategy'] = 0  # Gauss Random
-        elif isinstance(initial_max, float) and isinstance(initial_min, float):
+        elif is_compatible_with(initial_max, float) and \
+                is_compatible_with(initial_min, float):
+            initial_max = initial_max
+            initial_min = initial_min
             assert initial_min < initial_max
             initial_mean = (initial_max + initial_min) / 2
             initial_std = initial_mean - initial_min
@@ -83,16 +123,16 @@ class ParameterAttribute(object):
         else:
             raise RuntimeError("Unexpected branch.")
 
-        if not is_static and isinstance(l1_rate, float):
+        if not is_static and is_compatible_with(l1_rate, float):
             self.attr['decay_rate_l1'] = l1_rate
 
-        if not is_static and isinstance(l2_rate, float):
+        if not is_static and is_compatible_with(l2_rate, float):
             self.attr['decay_rate'] = l2_rate
 
-        if not is_static and isinstance(learning_rate, float):
+        if not is_static and is_compatible_with(learning_rate, float):
             self.attr['learning_rate'] = learning_rate
 
-        if not is_static and isinstance(momentum, float):
+        if not is_static and is_compatible_with(momentum, float):
             self.attr['momentum'] = momentum
 
         if name is not None:
diff --git a/python/paddle/trainer_config_helpers/tests/layers_test_config.py b/python/paddle/trainer_config_helpers/tests/layers_test_config.py
index 27b22ecb70..b9eaf2fce7 100644
--- a/python/paddle/trainer_config_helpers/tests/layers_test_config.py
+++ b/python/paddle/trainer_config_helpers/tests/layers_test_config.py
@@ -39,10 +39,20 @@ print_layer(input=[out])
 outputs(classification_cost(out, data_layer(name="label", size=num_classes)))
 
 dotmul = mixed_layer(input=[dotmul_operator(x=x1, y=y1),
-                            dotmul_projection(input=y1)])
+                            dotmul_projection(input=y1)])
+
+proj_with_attr_init = mixed_layer(input=full_matrix_projection(input=y1,
+                                      param_attr=ParamAttr(learning_rate=0,
+                                                           initial_mean=0,
+                                                           initial_std=0)),
+                                  bias_attr=ParamAttr(initial_mean=0, initial_std=0, learning_rate=0),
+                                  act=LinearActivation(),
+                                  size=5,
+                                  name='proj_with_attr_init')
+
 
 # for ctc
-tmp = fc_layer(input=[x1, dotmul],
+tmp = fc_layer(input=[x1, dotmul, proj_with_attr_init],
                size=num_classes + 1,
                act=SoftmaxActivation())
 ctc = ctc_layer(input=tmp,
-- 
GitLab
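
A quick way to sanity-check the semantics of the new helpers is the
standalone sketch below. The two functions are copied from the attrs.py
hunk above so the snippet runs on its own; the only deviations, made for
the sketch alone, are that the bare "except:" is narrowed to
"except Exception" and the assertions at the end are illustrative, not
part of the patch.

    def convert_and_compare(x, Type):
        # Round-trip x through Type; equality means nothing was lost.
        return type(x)(Type(x)) == x


    def is_compatible_with(x, Type):
        if type(x) == Type:
            return True
        try:
            if float == Type or int == Type:
                # str and bool convert to float/int, but almost never
                # meaningfully, so they are rejected up front. Rejected
                # inputs fall through and return None, which is falsy.
                if not isinstance(x, str) and not isinstance(x, bool):
                    return convert_and_compare(x, Type)
            elif bool == Type:
                # A string should not initialize a bool variable.
                if not isinstance(x, str):
                    return convert_and_compare(x, Type)
            else:
                return False
        except Exception:
            return False


    assert is_compatible_with(1, float)         # 1 -> 1.0 -> 1: lossless
    assert is_compatible_with(0.5, float)       # exact type match
    assert not is_compatible_with(1.5, int)     # 1.5 -> 1 -> 1.0: lossy
    assert not is_compatible_with('1', float)   # strings rejected
    assert not is_compatible_with(True, float)  # bools rejected

This is why the ParameterAttribute hunks can now accept values such as
learning_rate=0: the integer 0 round-trips through float without loss, so
is_compatible_with(0, float) holds, whereas the old isinstance(0, float)
check was False. The proj_with_attr_init layer added to
layers_test_config.py exercises exactly this case.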