From 16bebc43881928428f843fff854e39e2a2c9ccbc Mon Sep 17 00:00:00 2001
From: Luo Tao
Date: Tue, 6 Dec 2016 15:59:57 +0800
Subject: [PATCH] some tiny fix

---
 demo/seqToseq/dataprovider.py          |  25 +-
 doc_theme/static/css/override.css      | 421 +------------------------
 python/paddle/trainer/config_parser.py |   9 +-
 3 files changed, 18 insertions(+), 437 deletions(-)

diff --git a/demo/seqToseq/dataprovider.py b/demo/seqToseq/dataprovider.py
index 5174092df26..127c3672c77 100755
--- a/demo/seqToseq/dataprovider.py
+++ b/demo/seqToseq/dataprovider.py
@@ -24,21 +24,20 @@ def hook(settings, src_dict_path, trg_dict_path, is_generating, file_list,
     # job_mode = 1: training mode
     # job_mode = 0: generating mode
     settings.job_mode = not is_generating
-    settings.src_dict = dict()
-    with open(src_dict_path, "r") as fin:
-        settings.src_dict = {
-            line.strip(): line_count
-            for line_count, line in enumerate(fin)
-        }
-    settings.trg_dict = dict()
-    with open(trg_dict_path, "r") as fin:
-        settings.trg_dict = {
-            line.strip(): line_count
-            for line_count, line in enumerate(fin)
-        }
+
+    def fun(dict_path):
+        out_dict = dict()
+        with open(dict_path, "r") as fin:
+            out_dict = {
+                line.strip(): line_count
+                for line_count, line in enumerate(fin)
+            }
+        return out_dict
+
+    settings.src_dict = fun(src_dict_path)
+    settings.trg_dict = fun(trg_dict_path)
 
     settings.logger.info("src dict len : %d" % (len(settings.src_dict)))
-    settings.sample_count = 0
 
     if settings.job_mode:
         settings.slots = {
diff --git a/doc_theme/static/css/override.css b/doc_theme/static/css/override.css
index 460460805fb..438a87848a0 100644
--- a/doc_theme/static/css/override.css
+++ b/doc_theme/static/css/override.css
@@ -1,422 +1,3 @@
 [419 deleted blank lines elided]
 body {
     padding-top: 80px;
     background-image: none !important;
@@ -922,4 +503,4 @@ a, a:focus, a:hover, a:visited {
     background-color: transparent;
     color: #ff9711;
     padding: 0;
-}
\ No newline at end of file
+}
diff --git a/python/paddle/trainer/config_parser.py b/python/paddle/trainer/config_parser.py
index 01ff586c4f1..a977e9b65de 100644
--- a/python/paddle/trainer/config_parser.py
+++ b/python/paddle/trainer/config_parser.py
@@ -1872,7 +1872,7 @@ class BatchNormLayer(LayerBase):
             image_conf = self.config.inputs[0].image_conf
             parse_image(self.inputs[0].image, input_layer.name, image_conf)
             self.set_cnn_layer(name, image_conf.img_size_y, image_conf.img_size,
-                               image_conf.channels)
+                               image_conf.channels, False)
             psize = self.calc_parameter_size(image_conf)
             dims = [1, psize]
@@ -3387,10 +3387,11 @@ def parse_config(config_file, config_arg_str):
    # config = parse_config(configs, "is_predict=1")
    #
    # then you get config proto object.
     if hasattr(config_file, '__call__'):
-        config_file.func_globals.update(make_config_environment("", config_args))
-        config_file()
+        config_file.func_globals.update(
+            make_config_environment("", config_args))
+        config_file()
     else:
-        execfile(config_file, make_config_environment(config_file, config_args))
+        execfile(config_file,
+                 make_config_environment(config_file, config_args))
     for k, v in settings.iteritems():
         if v is None:
             continue
--
GitLab
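
Note on the dataprovider.py hunk above: the patch deduplicates the source/target dictionary loading into a single nested helper (`fun`). Below is a minimal standalone sketch of that pattern, assuming nothing beyond the standard library; the `load_dict` name and the throwaway vocabulary file are illustrative stand-ins, not part of the patch.

    import os
    import tempfile


    def load_dict(dict_path):
        # Map each vocabulary token (one per line) to its zero-based line index,
        # mirroring the dict comprehension used in the patched hook().
        with open(dict_path, "r") as fin:
            return {line.strip(): idx for idx, line in enumerate(fin)}


    if __name__ == "__main__":
        # Write a tiny throwaway vocabulary so the sketch is self-contained.
        with tempfile.NamedTemporaryFile("w", suffix=".dict", delete=False) as f:
            f.write("<s>\n<e>\n<unk>\napple\nbanana\n")
            vocab_path = f.name

        src_dict = load_dict(vocab_path)
        print("src dict len : %d" % len(src_dict))  # src dict len : 5
        print(src_dict["apple"])                    # 3
        os.remove(vocab_path)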