/*
  Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
*/
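// Unit tests for paddle::optimizer::ParameterOptimizer: construction from a
// serialized OptimizerConfig, weight access, gradient updates, and state
// checkpointing.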

#include "parameter_optimizer.h"
#include <cmath>
#include <map>
#include <vector>
#include "gtest/gtest.h"
#include "lr_policy.h"

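// Allocates a Tensor of `size` elements filled with uniform random values in [0, 1].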
paddle::optimizer::Tensor* FillTensor(size_t size) {
  paddle::optimizer::Tensor* param = new paddle::optimizer::Tensor(size);
  paddle::optimizer::Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
  }
  return param;
}

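// Allocates a Tensor of `size` elements where element i holds the value i,
// giving tests a known baseline to compare against.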
paddle::optimizer::Tensor* FixedTensor(size_t size) {
  paddle::optimizer::Tensor* param = new paddle::optimizer::Tensor(size);
  paddle::optimizer::Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = i;
  }
  return param;
}

// Fixture that builds one SGD and one Adam ParameterOptimizer from a
// serialized OptimizerConfig and exercises their public interface.
class OptimizerTest : public testing::Test {
public:
  virtual ~OptimizerTest() {}
  // Number of elements in each test tensor.
  const size_t kSize = 5;

  virtual void SetUp() {
    CreateSGD();
    CreateAdam();
  }
  virtual void TearDown() {
    // Release the optimizers created in SetUp to avoid leaking them.
    for (size_t i = 0; i < opts_.size(); ++i) {
      delete opts_[i];
    }
    opts_.clear();
  }

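  // Builds a plain SGD optimizer (no momentum, decay, or Nesterov) with a
  // constant learning rate of 0.1 and appends it to opts_.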
  void CreateSGD() {
    paddle::optimizer::Tensor* parameter = FixedTensor(kSize);
    config_.set_optimizer(paddle::OptimizerConfig::SGD);
    config_.mutable_sgd()->set_momentum(0.0);
    config_.mutable_sgd()->set_decay(0.0);
    config_.mutable_sgd()->set_nesterov(false);
    config_.set_lr_policy(paddle::OptimizerConfig::Const);
    config_.mutable_const_lr()->set_learning_rate(0.1);
    std::string str = config_.SerializeAsString();
    paddle::optimizer::ParameterOptimizer* opt =
        paddle::optimizer::ParameterOptimizer::Create(str, parameter);
    opts_.push_back(opt);
  }

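  // Builds an Adam optimizer with a constant learning rate of 0.1 and
  // appends it to opts_.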
  void CreateAdam() {
    paddle::optimizer::Tensor* parameter = FixedTensor(kSize);
    config_.set_optimizer(paddle::OptimizerConfig::Adam);
    config_.mutable_adam()->set_beta_1(0.9);
    config_.mutable_adam()->set_beta_2(0.1);
    config_.mutable_adam()->set_epsilon(1e-3);
    config_.mutable_adam()->set_decay(0.0);
    config_.set_lr_policy(paddle::OptimizerConfig::Const);
    config_.mutable_const_lr()->set_learning_rate(0.1);
    std::string str = config_.SerializeAsString();
    paddle::optimizer::ParameterOptimizer* opt =
        paddle::optimizer::ParameterOptimizer::Create(str, parameter);
    opts_.push_back(opt);
  }

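  // Before any update, each optimizer's weight buffer should still match
  // the fixed tensor it was created from.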
  void TestGetWeight() {
    paddle::optimizer::Tensor* p = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      int s = 0;
      float* newp = (float*)opts_[i]->get_weight(&s);
      for (size_t j = 0; j < kSize; ++j) {
        EXPECT_EQ(newp[j], (*p)[j]);
      }
    }
    delete p;
  }

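  // Applies one gradient step to every optimizer; a smoke test that only
  // checks nothing crashes.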
  void TestUpdate() {
    paddle::optimizer::Tensor* g = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      opts_[i]->Update(g);
    }
    delete g;
  }

  // Round-trips each optimizer's state through SerializeState and
  // DeserializeState; a smoke test with no value assertions.
  void TestCheckPoint() {
    for (size_t i = 0; i < opts_.size(); ++i) {
      int state_len = 0;
      std::string state = opts_[i]->SerializeState(&state_len);
      opts_[i]->DeserializeState(state);
    }
  }

private:
  std::vector<paddle::optimizer::ParameterOptimizer*> opts_;
  paddle::OptimizerConfig config_;
};

TEST_F(OptimizerTest, TestGetWeight) { TestGetWeight(); }

TEST_F(OptimizerTest, TestUpdate) { TestUpdate(); }

TEST_F(OptimizerTest, TestCheckPoint) { TestCheckPoint(); }

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}