diff --git a/python/paddle/fluid/tests/unittests/test_prelu_op.py b/python/paddle/fluid/tests/unittests/test_prelu_op.py
index 1e3e40d54a78045c8d8fdd9a3a3715107d1e7a80..48a6b0577b6787d2e1231fdcbe6d2c1bb46414ed 100644
--- a/python/paddle/fluid/tests/unittests/test_prelu_op.py
+++ b/python/paddle/fluid/tests/unittests/test_prelu_op.py
@@ -16,6 +16,7 @@ from __future__ import print_function
 
 import unittest
 import numpy as np
+import six
 from op_test import OpTest
 
 
@@ -62,17 +63,20 @@ class PReluTest(OpTest):
 
 
 # TODO(minqiyang): Resume these test cases after fixing Python3 CI job issues
-# class TestCase1(PReluTest):
-#     def initTestCase(self):
-#         self.attrs = {'mode': "all"}
+if six.PY2:
 
-# class TestCase2(PReluTest):
-#     def initTestCase(self):
-#         self.attrs = {'mode': "channel"}
+    class TestCase1(PReluTest):
+        def initTestCase(self):
+            self.attrs = {'mode': "all"}
+
+    class TestCase2(PReluTest):
+        def initTestCase(self):
+            self.attrs = {'mode': "channel"}
+
+    class TestCase3(PReluTest):
+        def initTestCase(self):
+            self.attrs = {'mode': "element"}
 
-# class TestCase3(PReluTest):
-#     def initTestCase(self):
-#         self.attrs = {'mode': "element"}
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/python/paddle/fluid/transpiler/details/program_utils.py b/python/paddle/fluid/transpiler/details/program_utils.py
index f0fafaa84a73d641ff6ceb74def6addaea759516..a83aa0f11eed9bfc1674d8d75dcfacc297f056b0 100644
--- a/python/paddle/fluid/transpiler/details/program_utils.py
+++ b/python/paddle/fluid/transpiler/details/program_utils.py
@@ -153,7 +153,7 @@ def block_to_code(block, block_idx):
 
     indent += 1
     # sort all vars
-    all_vars = sorted(block.vars.iteritems(), key=lambda x: x[0])
+    all_vars = sorted(six.iteritems(block.vars), key=lambda x: x[0])
 
     for var in all_vars:
         print("{}{}".format(get_indent_space(indent), variable_to_code(var[1])))
diff --git a/python/paddle/fluid/transpiler/distribute_transpiler.py b/python/paddle/fluid/transpiler/distribute_transpiler.py
index 8a330e0dee7eda02d0858446778363f2235a3d73..d4d218d547a394a56c040ade2a9ba703b691b86b 100644
--- a/python/paddle/fluid/transpiler/distribute_transpiler.py
+++ b/python/paddle/fluid/transpiler/distribute_transpiler.py
@@ -300,7 +300,7 @@ class DistributeTranspiler(object):
             input_deps = grad_name_to_send_dummy_out.values()
             program.global_block().append_op(
                 type="send_barrier",
-                inputs={"X": input_deps},
+                inputs={"X": list(input_deps)},
                 outputs={"Out": send_barrier_out},
                 attrs={
                     "endpoints": pserver_endpoints,
@@ -401,7 +401,7 @@ class DistributeTranspiler(object):
 
         Args:
            recv_vars (list): Variable list to recv for current trainer_id
-            eplist (list): A list of strings indicating 
+            eplist (list): A list of strings indicating
 
        Returns:
            Program: trainer side startup program.
@@ -455,7 +455,7 @@ class DistributeTranspiler(object):
             if len(splited_var) <= 1:
                 continue
             # NOTE: if enable memory optimization, origin vars maybe removed.
-            if startup_program.global_block().vars.has_key(varname):
+            if varname in startup_program.global_block().vars:
                 orig_param = startup_program.global_block().vars[varname]
             else:
                 origin_param_var = self.origin_program.global_block().vars[
@@ -690,7 +690,7 @@ class DistributeTranspiler(object):
 
         Args:
            endpoint (str): current pserver endpoint.
-        
+
        Returns:
            tuple: (main_program, startup_program), of type "Program"
 
@@ -713,7 +713,7 @@ class DistributeTranspiler(object):
            endpoint (str): current pserver endpoint.
            pserver_program (Program): deprecated, call get_pserver_program first.
            startup_program (Program): deprecated, should pass startup_program
-                when initalizing 
+                when initalizing
 
        Returns:
            Program: parameter server side startup program.
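
The patch above applies three standard six-based porting idioms: six.iteritems in place of Python 2's dict.iteritems, an in-membership test in place of the removed dict.has_key, and list() around dict.values() because values() returns a lazy view on Python 3. A minimal, self-contained sketch of these idioms, not part of the patch; the block_vars dict below is a hypothetical stand-in for a Paddle block's variable map:

# Standalone sketch of the Python 2/3 compatibility idioms used in the patch.
# block_vars is a made-up stand-in for a block's variable dict.
import six

block_vars = {"fc_0.w_0": "w", "fc_0.b_0": "b"}

# dict.iteritems() no longer exists on Python 3; six.iteritems works on both.
all_vars = sorted(six.iteritems(block_vars), key=lambda x: x[0])

# dict.has_key() was removed in Python 3; use the in operator instead.
assert "fc_0.w_0" in block_vars

# dict.values() is a lazy view on Python 3, so materialize it with list()
# before handing it to code that expects a real list.
input_deps = list(block_vars.values())

print([name for name, _ in all_vars], input_deps)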