diff --git a/python/paddle/v2/__init__.py b/python/paddle/v2/__init__.py
index c0a2bdc4259c3f48e8e7fdf4c8d8b2584253875a..30d0b2a398bd0e39895daf9b1421ec736ab8da83 100644
--- a/python/paddle/v2/__init__.py
+++ b/python/paddle/v2/__init__.py
@@ -22,7 +22,7 @@ import py_paddle.swig_paddle as api
 
 __all__ = [
     'optimizer', 'layer', 'activation', 'parameters', 'init', 'trainer',
-    'event', 'data_type.py'
+    'event', 'data_type'
 ]
 
 
diff --git a/python/paddle/v2/layer.py b/python/paddle/v2/layer.py
index 4d052c983c2c07730fe5111ccb961de68e73fb8f..9e999fccadf16d6645958baa6f6ef0e828846606 100644
--- a/python/paddle/v2/layer.py
+++ b/python/paddle/v2/layer.py
@@ -67,7 +67,7 @@ paddle.v2.parameters.create, no longer exposed to users.
 """
 
 import paddle.trainer_config_helpers as conf_helps
-from . import data_type as v2_data
+import data_type as data_type
 from paddle.trainer_config_helpers.config_parser_utils import \
     parse_network_config as __parse__
 from paddle.trainer_config_helpers.default_decorators import wrap_name_default
@@ -166,7 +166,7 @@ So we also need to implement some special LayerV2.
 
 class DataLayerV2(Layer):
     def __init__(self, name, type, **kwargs):
-        assert isinstance(type, v2_data.InputType)
+        assert isinstance(type, data_type.InputType)
 
         self.type = type
         self.__method_name__ = 'data_layer'
@@ -198,8 +198,8 @@ cross_entropy_cost = __convert_to_v2__(
     parent_names=['input', 'label'])
 
 if __name__ == '__main__':
-    pixel = data(name='pixel', type=v2_data.dense_vector(784))
-    label = data(name='label', type=v2_data.integer_value(10))
+    pixel = data(name='pixel', type=data_type.dense_vector(784))
+    label = data(name='label', type=data_type.integer_value(10))
     hidden = fc(input=pixel, size=100, act=conf_helps.SigmoidActivation())
     inference = fc(input=hidden, size=10, act=conf_helps.SoftmaxActivation())
     maxid = max_id(input=inference)