diff --git a/paddle/operators/momentum_op.cc b/paddle/operators/momentum_op.cc
index e8ce16f4cfcf83fd13e4d3a5318a4ae0c8c8449c..19954006195c1e9fd34328b52ed2a9eade526235 100644
--- a/paddle/operators/momentum_op.cc
+++ b/paddle/operators/momentum_op.cc
@@ -75,7 +75,7 @@ class MomentumOpMaker : public framework::OpProtoAndCheckerMaker {
     AddOutput("VelocityOut", "(Tensor) Output updated velocity");
 
     AddAttr<float>("mu", "(float) Momentum coefficient");
-    AddAttr<bool>("useNesterov",
+    AddAttr<bool>("use_nesterov",
                   "(bool, default false) "
                   "Use Nesterov Momentum")
         .SetDefault(false);
diff --git a/paddle/operators/momentum_op.h b/paddle/operators/momentum_op.h
index e6d6d1da3df9f7e43a93fcc2e12658a01a491f81..8f7f5eb5c21c0342f57a47b85d28f4454f4566c2 100644
--- a/paddle/operators/momentum_op.h
+++ b/paddle/operators/momentum_op.h
@@ -34,7 +34,7 @@ class MomentumOpKernel : public framework::OpKernel<T> {
     velocity_out->mutable_data<T>(ctx.GetPlace());
 
     float mu = ctx.Attr<float>("mu");
-    bool use_nesterov = ctx.Attr<bool>("useNesterov");
+    bool use_nesterov = ctx.Attr<bool>("use_nesterov");
 
     auto p_out = framework::EigenVector<T>::Flatten(*param_out);
     auto v_out = framework::EigenVector<T>::Flatten(*velocity_out);
diff --git a/python/paddle/v2/framework/optimizer.py b/python/paddle/v2/framework/optimizer.py
index f20865d604f68c8398f299bf8edfd020bfa4e4c5..5b4cdecf2c4285618131657a09fbe437191ea75a 100644
--- a/python/paddle/v2/framework/optimizer.py
+++ b/python/paddle/v2/framework/optimizer.py
@@ -297,7 +297,7 @@ class MomentumOptimizer(Optimizer):
                 "VelocityOut": velocity_acc
             },
             attrs={"mu": self._momentum,
-                   "useNesterov": self._use_nesterov})
+                   "use_nesterov": self._use_nesterov})
 
         return momentum_op
 
diff --git a/python/paddle/v2/framework/tests/test_momentum_op.py b/python/paddle/v2/framework/tests/test_momentum_op.py
index 654d31975aab4578055e7e70ade202bd2c3d93cb..638095f7564c8761151a7794f98f9ca797b0083b 100644
--- a/python/paddle/v2/framework/tests/test_momentum_op.py
+++ b/python/paddle/v2/framework/tests/test_momentum_op.py
@@ -37,7 +37,7 @@ class TestMomentumOp1(OpTest):
 
 
 class TestMomentumOp2(OpTest):
-    '''Test Momentum with defaukt values for attributes
+    '''Test Momentum with default values for attributes
     '''
 
     def setUp(self):
@@ -57,7 +57,7 @@ class TestMomentumOp2(OpTest):
             'LearningRate': learning_rate
         }
 
-        self.attrs = {'mu': mu, 'useNesterov': use_nesterov}
+        self.attrs = {'mu': mu, 'use_nesterov': use_nesterov}
 
         velocity_out = mu * velocity + grad
         if use_nesterov:
diff --git a/python/paddle/v2/framework/tests/test_optimizer.py b/python/paddle/v2/framework/tests/test_optimizer.py
index 9333df8f7f347a080cfb035ccd0c575ded7c423a..a39e7402600c7a94301de030c90ea51264248cf1 100644
--- a/python/paddle/v2/framework/tests/test_optimizer.py
+++ b/python/paddle/v2/framework/tests/test_optimizer.py
@@ -98,7 +98,7 @@ class TestMomentumOptimizer(unittest.TestCase):
         self.assertEqual(len(opts), 1)
         sgd_op = opts[0]
         self.assertEqual(sgd_op.type, "momentum")
-        self.assertFalse(sgd_op.attr('useNesterov'))
+        self.assertFalse(sgd_op.attr('use_nesterov'))
 
         # Check accumulators
         accumulators = momentum_optimizer.get_accumulators()
@@ -143,7 +143,7 @@ class TestMomentumOptimizer(unittest.TestCase):
         self.assertEqual(len(opts), 1)
         sgd_op = opts[0]
         self.assertEqual(sgd_op.type, "momentum")
-        self.assertTrue(sgd_op.attr('useNesterov'))
+        self.assertTrue(sgd_op.attr('use_nesterov'))
 
         # Check accumulators
         accumulators = momentum_optimizer.get_accumulators()
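For reference, the `use_nesterov` flag renamed above toggles between plain momentum and Nesterov momentum in the kernel and in `TestMomentumOp2`. A minimal Python sketch of that update rule follows; only the shared line `velocity_out = mu * velocity + grad` is visible in the truncated test hunk, so the two parameter-update branches here are assumptions based on the standard formulations, not code quoted from the repository.

```python
def momentum_update(param, grad, velocity, learning_rate, mu, use_nesterov=False):
    # Shared velocity accumulation (visible in the diff above).
    velocity_out = mu * velocity + grad
    if use_nesterov:
        # Assumed Nesterov branch: gradient step plus a look-ahead
        # along the freshly updated velocity.
        param_out = param - (grad + mu * velocity_out) * learning_rate
    else:
        # Assumed plain-momentum branch: step along the accumulated velocity.
        param_out = param - learning_rate * velocity_out
    return param_out, velocity_out
```

With plain floats or NumPy arrays as inputs, this should reproduce the `ParamOut`/`VelocityOut` reference values the test compares against the operator's outputs.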