Commit 27d1b8db authored by W wangxiao

python3 ok
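The change is a mechanical Python 2 → 3 port: dict views and zip() are lazy iterators in Python 3, so call sites that index, re-iterate, or unpack them into parallel lists are wrapped in list(). A minimal sketch of the pattern, with made-up values:

```python
# Illustrative only: the py2 -> py3 pattern applied throughout this commit.
conf = {"lr": "1e-3", "batch_size": 32}

pairs = list(conf.items())         # materialize the view so it can be indexed / reused
names, values = list(zip(*pairs))  # zip() is an iterator in py3; list() keeps the result reusable
print(names, values)               # ('lr', 'batch_size') ('1e-3', 32)
```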

Parent c21afb28
......@@ -24,6 +24,7 @@ import paddle.fluid as fluid
import paddle.fluid.layers as layers
from paddle.fluid.layer_helper import LayerHelper as LayerHelper
from functools import reduce # py3
def layer_norm(x, begin_norm_axis=1, epsilon=1e-6, param_attr=None, bias_attr=None):
helper = LayerHelper('layer_norm', **locals())
mean = layers.reduce_mean(x, dim=begin_norm_axis, keep_dim=True)
......
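The layer_norm helper above starts from a reduce_mean along begin_norm_axis. For reference, a minimal NumPy sketch of the computation it builds toward: standard layer normalization with an epsilon-stabilized standard deviation. Parameter handling via param_attr/bias_attr is omitted, and normalizing over all axes from begin_norm_axis onward is an assumption here, not something shown in the truncated hunk.

```python
import numpy as np

def layer_norm_np(x, begin_norm_axis=1, epsilon=1e-6):
    # Assumed semantics: normalize over every axis from begin_norm_axis onward.
    axes = tuple(range(begin_norm_axis, x.ndim))
    mean = x.mean(axis=axes, keepdims=True)
    var = x.var(axis=axes, keepdims=True)
    return (x - mean) / np.sqrt(var + epsilon)

x = np.random.rand(4, 8).astype("float32")
print(layer_norm_np(x).shape)  # (4, 8)
```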
......@@ -95,7 +95,7 @@ def _try_float(s):
def _check_conf(conf, checklist=None):
assert isinstance(conf, dict), "{} is not a dict.".format(conf)
ret = {}
for k,v in conf.items():
for k,v in list(conf.items()):
if isinstance(v, str):
v = _try_float(v)
ret[k] = v
......
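The hunk above walks a config dict and coerces string values to floats where possible. _try_float itself is not shown in this diff, so the version below is a hypothetical stand-in that only illustrates the idea:

```python
def _try_float(s):
    # Hypothetical stand-in for the _try_float referenced above:
    # return a float when the string parses, otherwise the original value.
    try:
        return float(s)
    except ValueError:
        return s

def _check_conf(conf):
    assert isinstance(conf, dict), "{} is not a dict.".format(conf)
    # list(...) matches the py3-safe iteration added in this commit
    return {k: (_try_float(v) if isinstance(v, str) else v)
            for k, v in list(conf.items())}

print(_check_conf({"lr": "1e-3", "name": "taskA", "epochs": 3}))
```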
......@@ -92,7 +92,7 @@ class TaskInstance(object):
output_vars = self._task_layer[phase].build(net_inputs, scope_name=scope)
if phase == 'pred':
if output_vars is not None:
self._pred_fetch_name_list, self._pred_fetch_var_list = zip(*output_vars.items())
self._pred_fetch_name_list, self._pred_fetch_var_list = list(zip(*list(output_vars.items())))
else:
self._pred_fetch_name_list = []
self._pred_fetch_var_list = []
......@@ -113,7 +113,7 @@ class TaskInstance(object):
fluid.io.save_inference_model(dirpath, self._pred_input_varname_list, self._pred_fetch_var_list, self._exe, prog)
conf = {}
for k, strv in self._save_protocol.items():
for k, strv in list(self._save_protocol.items()): # py3
conf[k] = eval(strv)  # py3: exec() cannot bind new locals inside a method, so evaluate the attribute expression directly
with open(os.path.join(dirpath, '__conf__'), 'w') as writer:
......@@ -123,7 +123,7 @@ class TaskInstance(object):
def load(self, infer_model_path=None):
if infer_model_path is None:
infer_model_path = self._save_infermodel_path
for k,v in json.load(open(os.path.join(infer_model_path, '__conf__'))).items():
for k,v in list(json.load(open(os.path.join(infer_model_path, '__conf__'))).items()): # py3
strv = self._save_protocol[k]
exec('{}=v'.format(strv))
pred_prog, self._pred_input_varname_list, self._pred_fetch_var_list = \
......@@ -167,13 +167,13 @@ class TaskInstance(object):
@property
def pred_input(self):
return zip(*[self._pred_input_name_list, self._pred_input_varname_list])
return list(zip(*[self._pred_input_name_list, self._pred_input_varname_list])) # py3
@pred_input.setter
def pred_input(self, val):
assert isinstance(val, dict)
self._pred_input_name_list, self._pred_input_varname_list = \
zip(*[[k, v.name] for k,v in val.items()])
list(zip(*[[k, v.name] for k,v in list(val.items())])) # py3
@property
def pred_fetch_list(self):
......
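The save()/load() hunks above persist a handful of instance attributes through a string protocol: _save_protocol maps a config key to an attribute expression, save() evaluates each expression into a __conf__ file, and load() assigns the values back with exec. A minimal standalone sketch of that round trip; the class, the protocol key, and the paths below are illustrative, not the real ones:

```python
import json, os, tempfile

class _Inst(object):
    # Illustrative protocol: config key -> attribute expression, as a string.
    _save_protocol = {'infermodel_path': 'self._save_infermodel_path'}

    def __init__(self, path):
        self._save_infermodel_path = path

    def save(self, dirpath):
        conf = {}
        for k, strv in list(self._save_protocol.items()):
            conf[k] = eval(strv)  # evaluate e.g. "self._save_infermodel_path"
        with open(os.path.join(dirpath, '__conf__'), 'w') as writer:
            writer.write(json.dumps(conf))

    def load(self, dirpath):
        for k, v in list(json.load(open(os.path.join(dirpath, '__conf__'))).items()):
            # assign back through the same protocol, e.g. self._save_infermodel_path = v
            exec('{}=v'.format(self._save_protocol[k]))

d = tempfile.mkdtemp()
a = _Inst('output_model/pred'); a.save(d)
b = _Inst(None); b.load(d)
print(b._save_infermodel_path)  # output_model/pred
```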
......@@ -111,7 +111,7 @@ class FullTokenizer(object):
def __init__(self, vocab_file, do_lower_case=True):
self.vocab = load_vocab(vocab_file)
self.inv_vocab = {v: k for k, v in self.vocab.items()}
self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
......@@ -135,7 +135,7 @@ class CharTokenizer(object):
def __init__(self, vocab_file, do_lower_case=True):
self.vocab = load_vocab(vocab_file)
self.inv_vocab = {v: k for k, v in self.vocab.items()}
self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
def tokenize(self, text):
......
......@@ -115,7 +115,7 @@ class FullTokenizer(object):
def __init__(self, vocab_file, do_lower_case=True):
self.vocab = load_vocab(vocab_file)
self.inv_vocab = {v: k for k, v in self.vocab.items()}
self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
self.basic_tokenizer = BasicTokenizer(do_lower_case=do_lower_case)
self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
......@@ -139,7 +139,7 @@ class CharTokenizer(object):
def __init__(self, vocab_file, do_lower_case=True):
self.vocab = load_vocab(vocab_file)
self.inv_vocab = {v: k for k, v in self.vocab.items()}
self.inv_vocab = {v: k for k, v in list(self.vocab.items())}
self.wordpiece_tokenizer = WordpieceTokenizer(vocab=self.vocab)
def tokenize(self, text):
......
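Both tokenization files build their inverse vocabulary the same way. A minimal sketch of that id-to-token inversion, assuming load_vocab returns a token-to-id dict (the vocabulary below is made up):

```python
def load_vocab_demo():
    # Stand-in for load_vocab(vocab_file): token -> id
    return {"[PAD]": 0, "[UNK]": 1, "hello": 2, "world": 3}

vocab = load_vocab_demo()
# Same pattern as the diff: materialize items() and invert the mapping (id -> token).
inv_vocab = {v: k for k, v in list(vocab.items())}
print(inv_vocab[2])  # hello
```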
......@@ -87,7 +87,7 @@ def create_iterator_fn(iterator, iterator_prefix, shape_and_dtypes, outname_to_p
outputs = next(iterator) # dict type
prefix = iterator_prefix
for outname, val in outputs.items():
for outname, val in list(outputs.items()):
task_outname = prefix + '/' + outname
if outname in outname_to_pos:
......@@ -121,7 +121,7 @@ def create_joint_iterator_fn(iterators, iterator_prefixes, joint_shape_and_dtype
outputs = next(iterators[id]) # dict type
outbuf[id] = outputs
prefix = iterator_prefixes[id]
for outname, val in outputs.items():
for outname, val in list(outputs.items()):
task_outname = prefix + '/' + outname
if outname in outname_to_pos:
......@@ -176,7 +176,7 @@ def create_joint_iterator_fn(iterators, iterator_prefixes, joint_shape_and_dtype
has_show_warn = True
prefix = iterator_prefixes[id]
for outname, val in outputs.items():
for outname, val in list(outputs.items()):
if v > 0:
print('reader generate: '+outname)
task_outname = prefix + '/' + outname
......
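The reader hunks all rename each generated output with a task prefix before looking it up in outname_to_pos. A minimal sketch of that naming step; every name below is illustrative, and the truncated lines are assumed to also check the task-scoped name:

```python
outname_to_pos = {"token_ids": 0, "taskA/label_ids": 1}
outputs = {"token_ids": [101, 2054, 102], "label_ids": [1]}
prefix = "taskA"

buf = [None] * len(outname_to_pos)
for outname, val in list(outputs.items()):
    task_outname = prefix + '/' + outname          # e.g. "taskA/label_ids"
    if outname in outname_to_pos:                  # shared (un-prefixed) input
        buf[outname_to_pos[outname]] = val
    if task_outname in outname_to_pos:             # task-scoped input
        buf[outname_to_pos[task_outname]] = val
print(buf)  # [[101, 2054, 102], [1]]
```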