parameter_optimizer_test.cc
//  Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "parameter_optimizer.h"
#include <cmath>
#include <cstdlib>  // rand(), RAND_MAX
#include <map>
#include <string>
#include <vector>
#include "gtest/gtest.h"
#include "lr_policy.h"

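// Returns a newly allocated Tensor of `size` elements filled with
// pseudo-random values in [0, 1]; the caller owns the returned pointer.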
paddle::optimizer::Tensor* FillTensor(size_t size) {
  paddle::optimizer::Tensor* param = new paddle::optimizer::Tensor(size);
  paddle::optimizer::Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
  }
  return param;
}

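// Returns a newly allocated Tensor of `size` elements where element i holds
// the value i, giving a deterministic pattern the tests can compare against.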
paddle::optimizer::Tensor* FixedTensor(size_t size) {
  paddle::optimizer::Tensor* param = new paddle::optimizer::Tensor(size);
  paddle::optimizer::Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<float>(i);
  }
  return param;
}

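// Fixture that builds one SGD and one Adam ParameterOptimizer from a
// serialized OptimizerConfig and exercises their weight, update, and
// checkpoint APIs.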
class OptimizerTest : public testing::Test {
public:
  virtual ~OptimizerTest() {}
  // Number of elements in every paddle::optimizer::Tensor used by the tests.
  const size_t kSize = 5;

  virtual void SetUp() {
    CreateSGD();
    CreateAdam();
  }
  virtual void TearDown() {
    // Release the optimizers created in SetUp().
    for (size_t i = 0; i < opts_.size(); ++i) delete opts_[i];
    opts_.clear();
  }

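  // Configures a plain SGD optimizer (no momentum, decay, or Nesterov) with a
  // constant learning rate of 0.1 and appends it to opts_.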
  void CreateSGD() {
    paddle::optimizer::Tensor* parameter = FixedTensor(kSize);
    config_.set_optimizer(paddle::OptimizerConfig::SGD);
    config_.mutable_sgd()->set_momentum(0.0);
    config_.mutable_sgd()->set_decay(0.0);
    config_.mutable_sgd()->set_nesterov(false);
    config_.set_lr_policy(paddle::OptimizerConfig::Const);
    config_.mutable_const_lr()->set_learning_rate(0.1);
    std::string str = config_.SerializeAsString();
    paddle::optimizer::ParameterOptimizer* opt =
        paddle::optimizer::ParameterOptimizer::Create(str, parameter);
    opts_.push_back(opt);
  }

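  // Configures an Adam optimizer (beta_1 = 0.9, beta_2 = 0.1, epsilon = 1e-3,
  // no decay) with a constant learning rate of 0.1 and appends it to opts_.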
  void CreateAdam() {
    paddle::optimizer::Tensor* parameter = FixedTensor(kSize);
    config_.set_optimizer(paddle::OptimizerConfig::Adam);
    config_.mutable_adam()->set_beta_1(0.9);
    config_.mutable_adam()->set_beta_2(0.1);
    config_.mutable_adam()->set_epsilon(1e-3);
    config_.mutable_adam()->set_decay(0.0);
    config_.set_lr_policy(paddle::OptimizerConfig::Const);
    config_.mutable_const_lr()->set_learning_rate(0.1);
    std::string str = config_.SerializeAsString();
    paddle::optimizer::ParameterOptimizer* opt =
        paddle::optimizer::ParameterOptimizer::Create(str, parameter);
    opts_.push_back(opt);
  }

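  // Before any update, get_weight() should expose a buffer of kSize elements
  // identical to the fixed parameter tensor the optimizers were created with.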
  void TestGetWeight() {
    paddle::optimizer::Tensor* p = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      int s = 0;
      float* newp = (float*)opts_[i]->get_weight(&s);
      EXPECT_EQ(static_cast<size_t>(s), kSize);
      for (size_t j = 0; j < kSize; ++j) {
        EXPECT_EQ(newp[j], (*p)[j]);
      }
    }
  }

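  // Applies one gradient step per optimizer; this only checks that Update()
  // runs without crashing.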
  void TestUpdate() {
    paddle::optimizer::Tensor* g = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      opts_[i]->Update(g);
    }
  }

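  // Round-trips the optimizer state through SerializeState/DeserializeState
  // and checks that the state bytes and the weights are unchanged.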
  void TestCheckPoint() {
    paddle::optimizer::Tensor* p = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      auto state = opts_[i]->SerializeState();
      opts_[i]->DeserializeState(state);
      auto state1 = opts_[i]->SerializeState();
      opts_[i]->DeserializeState(state);
      EXPECT_EQ(state, state1);

      int s = 0;
      float* newp = (float*)opts_[i]->get_weight(&s);
      EXPECT_EQ(static_cast<size_t>(s), kSize);
      for (size_t j = 0; j < kSize; ++j) {
        EXPECT_EQ(newp[j], (*p)[j]);
      }
    }
  }

private:
  std::vector<paddle::optimizer::ParameterOptimizer*> opts_;
  paddle::OptimizerConfig config_;
};

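// Each TEST_F gets a fresh fixture, so SetUp() rebuilds the SGD and Adam
// optimizers before every check below.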
TEST_F(OptimizerTest, TestGetWeight) { TestGetWeight(); }

TEST_F(OptimizerTest, TestUpdate) { TestUpdate(); }

TEST_F(OptimizerTest, TestCheckPoint) { TestCheckPoint(); }