Unverified · Commit 06b55eaa authored by zhangchunle, committed by GitHub

fix function-redefined 1 (#34507)

Parent 2ad1e4c7
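This commit resolves pylint `function-redefined` (E0102) warnings: test methods and classes defined twice under the same name are renamed (usually with a numeric suffix) or, when they are exact duplicates, deleted. The warning matters because a later `def` with the same name silently replaces the earlier one when the class body is executed, so the first test is never collected or run. A minimal sketch of the problem (illustrative only, not code from this commit):

import unittest


class ExampleTest(unittest.TestCase):
    # Hypothetical test class, not taken from this commit.
    def test_case(self):
        self.assertEqual(1 + 1, 2)

    # This second definition shadows the one above; pylint reports it as
    # function-redefined (E0102), and only this version is ever run.
    def test_case(self):
        self.assertEqual(2 + 2, 4)


if __name__ == "__main__":
    # Reports "Ran 1 test"; renaming the duplicate (e.g. test_case_1)
    # makes both tests run, which is the pattern applied throughout this diff.
    unittest.main()
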
@@ -678,7 +678,7 @@ class TestAddQuantDequantPass(unittest.TestCase):
         self.residual_block_quant(
             quantizable_op_type, skip_pattern='skip_quant', for_ci=True)
 
-    def test_residual_block_skip_pattern(self):
+    def test_residual_block_skip_pattern_1(self):
         quantizable_op_type = ['elementwise_add', 'pool2d', 'mul', 'matmul']
         self.residual_block_quant(
             quantizable_op_type,

@@ -406,7 +406,7 @@ class TestForEnumerateVarNumpyWithBreak(TestForIterVarNumpy):
         self.dygraph_func = for_enumerate_var_numpy_with_break
 
 
-class TestForEnumerateVarNumpyWithBreak(TestForIterVarNumpy):
+class TestForEnumerateVarNumpyWithContinue(TestForIterVarNumpy):
     def set_test_func(self):
         self.dygraph_func = for_enumerate_var_numpy_with_continue
 
@@ -416,7 +416,7 @@ class TestForEnumerateVarNumpyWithStartAndBreak(TestForIterVarNumpy):
         self.dygraph_func = for_enumerate_var_numpy_with_start_break
 
 
-class TestForEnumerateVarNumpyWithStartAndBreak(TestForIterVarNumpy):
+class TestForEnumerateVarNumpyWithStartAndContinue(TestForIterVarNumpy):
     def set_test_func(self):
         self.dygraph_func = for_enumerate_var_numpy_with_start_continue

@@ -80,7 +80,7 @@ class TestDropoutOpInput1d(TestDropoutOp):
         }
 
 
-class TestDropoutOpInput1d(TestDropoutOp):
+class TestDropoutOpInput1d_1(TestDropoutOp):
     # the input is 1-D
     def setUp(self):
         self.op_type = "dropout"

@@ -73,7 +73,7 @@ class TestSequenceReverse3(TestSequenceReverseBase):
         self.lod = [3, 0, 6, 3]
 
 
-class TestSequenceReverse3(TestSequenceReverseBase):
+class TestSequenceReverse4(TestSequenceReverseBase):
     def initParameters(self):
         self.size = (12, 10)
         self.lod = [0, 2, 10, 0]

@@ -167,7 +167,7 @@ class TestIdentityActivation(TestConv2DFusionOp):
         self.activation = 'identity'
 
 
-class TestIdentityActivation(TestConv2DFusionOp):
+class TestIdentityActivation1(TestConv2DFusionOp):
     def init_activation(self):
         self.activation = 'identity'
         self.add_residual_data = False

@@ -52,7 +52,7 @@ class TestConvDoubleGradCheck(unittest.TestCase):
             self.func(p)
 
 
-class TestConvDoubleGradCheck(unittest.TestCase):
+class TestConvDoubleGradCheckTest0(unittest.TestCase):
     @prog_scope()
     def func(self, place):
         shape = [2, 4, 3, 3]

@@ -50,26 +50,6 @@ class TestDistGeoClipByGlobalNorm(TestFleetBase):
         self._sync_mode = False
         self._grad_clip_mode = 2
 
-    def check_with_place(self,
-                         model_file,
-                         delta=1e-3,
-                         check_error_log=False,
-                         need_envs={}):
-        required_envs = {
-            "PATH": os.getenv("PATH", ""),
-            "PYTHONPATH": os.getenv("PYTHONPATH", ""),
-            "LD_LIBRARY_PATH": os.getenv("LD_LIBRARY_PATH", ""),
-            "FLAGS_rpc_deadline": "5000",  # 5sec to fail fast
-            "http_proxy": ""
-        }
-        required_envs.update(need_envs)
-
-        tr0_losses, tr1_losses = self._run_cluster(model_file, required_envs)
-
-    def test_dist_train(self):
-        self.check_with_place(
-            "dist_fleet_ctr.py", delta=1e-5, check_error_log=True)
-
 
 class TestDistASyncClipByValue(TestFleetBase):
     def _setup_config(self):

@@ -149,10 +149,10 @@ class API_TestTensorEye(unittest.TestCase):
 
             self.assertRaises(TypeError, test_num_columns_type_check)
 
-            def test_num_columns_type_check():
+            def test_num_columns_type_check1():
                 paddle.eye(10, num_columns=10, dtype="int8")
 
-            self.assertRaises(TypeError, test_num_columns_type_check)
+            self.assertRaises(TypeError, test_num_columns_type_check1)
 
 
 if __name__ == "__main__":

@@ -77,40 +77,6 @@ class TestFleetRecomputeMetaOptimizer(TestFleetMetaOptimizer):
         ]
         self.assertIn('subprog', ''.join(outs))
 
-    def test_recompute_optimizer_backward(self):
-        """ test recompute optimizer backward """
-        train_prog, startup_prog = fluid.Program(), fluid.Program()
-        avg_cost, strategy = self.net(train_prog, startup_prog)
-        self.set_strategy(strategy, 'recompute')
-
-        opt = fluid.optimizer.MomentumOptimizer(
-            learning_rate=0.001, momentum=0.9)
-        opt = RecomputeOptimizer(opt)
-        opt.user_defined_strategy = strategy
-        params_grads = opt.backward(avg_cost, startup_prog)
-
-        outs = [
-            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
-        ]
-        self.assertIn('subprog', ''.join(outs))
-
-    def test_recompute_optimizer_backward(self):
-        """ test recompute optimizer backward """
-        train_prog, startup_prog = fluid.Program(), fluid.Program()
-        avg_cost, strategy = self.net(train_prog, startup_prog)
-        self.set_strategy(strategy, 'recompute')
-
-        opt = fluid.optimizer.MomentumOptimizer(
-            learning_rate=0.001, momentum=0.9)
-        opt = RecomputeOptimizer(opt)
-        opt.user_defined_strategy = strategy
-        params_grads = opt.backward(avg_cost, startup_prog)
-
-        outs = [
-            op.output('Out')[0] for op in avg_cost.block.ops if op.type == 'mul'
-        ]
-        self.assertIn('subprog', ''.join(outs))
-
     def test_recompute_optimizer(self):
         train_prog, startup_prog = fluid.Program(), fluid.Program()
         avg_cost, strategy = self.net(train_prog, startup_prog)

@@ -275,10 +275,10 @@ class TestGathertError(unittest.TestCase):
 
             self.assertRaises(TypeError, test_axis_dtype)
 
-            def test_axis_dtype():
+            def test_axis_dtype1():
                 paddle.gather(x, index, axis=axis)
 
-            self.assertRaises(TypeError, test_axis_dtype)
+            self.assertRaises(TypeError, test_axis_dtype1)
 
     def test_error2(self):
         with fluid.program_guard(fluid.Program(), fluid.Program()):

@@ -76,7 +76,7 @@ def TestHuberLossOp2(TestHuberLossOp):
         return (6, 6)
 
 
-def TestHuberLossOp2(TestHuberLossOp):
+def TestHuberLossOp3(TestHuberLossOp):
     def set_shape(self):
         return (6, 6, 1)

@@ -165,7 +165,7 @@ class TestMeshgridOp7(unittest.TestCase):
         assert np.array_equal(res_4.shape, [100, 200])
 
 
-class TestMeshgridOp7(unittest.TestCase):
+class TestMeshgridOp8(unittest.TestCase):
     def test_api_with_dygraph_tuple_input(self):
         input_3 = np.random.randint(0, 100, [100, ]).astype('int32')
         input_4 = np.random.randint(0, 100, [200, ]).astype('int32')

@@ -52,7 +52,7 @@ class TestExportWithTensor(unittest.TestCase):
         paddle.onnx.export(model, 'linear_net', input_spec=[self.x_spec])
 
 
-class TestExportWithTensor(unittest.TestCase):
+class TestExportWithTensor1(unittest.TestCase):
     def setUp(self):
         self.x = paddle.to_tensor(np.random.random((1, 128)))

@@ -221,7 +221,7 @@ class TestGeneratorSeed(unittest.TestCase):
         self.assertTrue(np.allclose(x1_np, x2_np))
         self.assertTrue(np.allclose(x_np, x3_np))
 
-    def test_generator_uniform_random_static(self):
+    def test_generator_uniform_random_static_1(self):
         fluid.disable_dygraph()
 
         gen = paddle.seed(123123143)
@@ -255,7 +255,7 @@ class TestGeneratorSeed(unittest.TestCase):
         self.assertTrue(np.allclose(out1_res2, out2_res2))
         self.assertTrue(not np.allclose(out1_res2, out1_res1))
 
-    def test_generator_randint_dygraph(self):
+    def test_generator_randint_dygraph_1(self):
         """Test Generator seed."""
         fluid.enable_dygraph()
@@ -405,7 +405,7 @@ class TestGeneratorSeed(unittest.TestCase):
         self.assertTrue(np.allclose(x1_np, x2_np))
         self.assertTrue(np.allclose(x_np, x3_np))
 
-    def test_generator_randperm_static(self):
+    def test_generator_randperm_static_1(self):
         fluid.disable_dygraph()

@@ -294,7 +294,7 @@ class TestDygraph(unittest.TestCase):
             shape = [3, 5, 9, 10]
             output = paddle.scatter_nd(index, updates, shape)
 
-    def test_dygraph(self):
+    def test_dygraph_1(self):
         with fluid.dygraph.guard(fluid.CPUPlace()):
             x = paddle.rand(shape=[3, 5, 9, 10], dtype='float32')
             updates = paddle.rand(shape=[3, 9, 10], dtype='float32')

@@ -38,7 +38,7 @@ class TestDocstring(pylint.testutils.CheckerTestCase):
         assert len(got) == 1
         assert 'W9001' == got[0][0]
 
-    def test_one_line(self):
+    def test_one_line_1(self):
         func_node = astroid.extract_node('''
         def test():
             """get news"""
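
One way to reproduce these warnings locally is to run pylint with only the function-redefined check enabled. A sketch follows; the target path is a placeholder, and this is not necessarily how Paddle's CI invokes pylint:

# Sketch: invoke pylint with every check disabled except function-redefined.
# "path/to/test_file.py" is a placeholder for any unit test file.
import subprocess

result = subprocess.run(
    [
        "pylint",
        "--disable=all",
        "--enable=function-redefined",
        "path/to/test_file.py",
    ],
    capture_output=True,
    text=True,
)
print(result.stdout)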