//  Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "parameter_optimizer.h"
#include <cmath>
#include <cstdlib>
#include <map>
#include <vector>
#include "gtest/gtest.h"
#include "lr_policy.h"

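// Build a heap-allocated Tensor of `size` elements filled with uniform random
// values in [0, 1]. The caller takes ownership of the returned pointer.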
paddle::optimizer::Tensor* FillTensor(size_t size) {
  paddle::optimizer::Tensor* param = new paddle::optimizer::Tensor(size);
  paddle::optimizer::Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
  }
  return param;
}

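// Build a Tensor whose i-th element is i, giving deterministic contents that
// the checks below can compare against.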
paddle::optimizer::Tensor* FixedTensor(size_t size) {
  paddle::optimizer::Tensor* param = new paddle::optimizer::Tensor(size);
  paddle::optimizer::Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = i;
  }
  return param;
}

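// Fixture that builds one SGD and one Adam optimizer over the same fixed
// parameter tensor and exercises their common ParameterOptimizer interface.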
class OptimizerTest : public testing::Test {
public:
  virtual ~OptimizerTest() {}
  // Number of elements in each test Tensor.
  const size_t kSize = 5;

  virtual void SetUp() {
    CreateSGD();
    CreateAdam();
  }
  virtual void TearDown() {}

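  // Build an SGD optimizer (no momentum, no decay, no Nesterov) with a
  // constant learning rate and register it in opts_.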
  void CreateSGD() {
    paddle::optimizer::Tensor* parameter = FixedTensor(kSize);
    config_.set_optimizer(paddle::OptimizerConfig::SGD);
    config_.mutable_sgd()->set_momentum(0.0);
    config_.mutable_sgd()->set_decay(0.0);
    config_.mutable_sgd()->set_nesterov(false);
    config_.set_lr_policy(paddle::OptimizerConfig::Const);
    config_.mutable_const_lr()->set_learning_rate(0.1);
    std::string str = config_.SerializeAsString();
    paddle::optimizer::ParameterOptimizer* opt =
        paddle::optimizer::ParameterOptimizer::Create(str, parameter);
    opts_.push_back(opt);
  }

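  // Build an Adam optimizer with a constant learning rate and register it
  // in opts_.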
  void CreateAdam() {
    paddle::optimizer::Tensor* parameter = FixedTensor(kSize);
    config_.set_optimizer(paddle::OptimizerConfig::Adam);
    config_.mutable_adam()->set_beta_1(0.9);
    config_.mutable_adam()->set_beta_2(0.1);
    config_.mutable_adam()->set_epsilon(1e-3);
    config_.mutable_adam()->set_decay(0.0);
    config_.set_lr_policy(paddle::OptimizerConfig::Const);
    config_.mutable_const_lr()->set_learning_rate(0.1);
    std::string str = config_.SerializeAsString();
    paddle::optimizer::ParameterOptimizer* opt =
        paddle::optimizer::ParameterOptimizer::Create(str, parameter);
    opts_.push_back(opt);
  }

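  // get_weight() should report kSize elements and, since no Update() has run
  // in this fixture, return the untouched initial values from FixedTensor.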
  void TestGetWeight() {
    paddle::optimizer::Tensor* p = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      int s = 0;
      float* newp = (float*)opts_[i]->get_weight(&s);
      EXPECT_EQ(static_cast<size_t>(s), kSize);
      for (size_t j = 0; j < kSize; ++j) {
        EXPECT_EQ(newp[j], (*p)[j]);
      }
    }
  }

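  // Apply one gradient step to every optimizer; this is a smoke test that
  // Update() accepts a gradient tensor without failing.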
  void TestUpdate() {
    paddle::optimizer::Tensor* g = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      opts_[i]->Update(g);
    }
  }

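  // Serialize, restore, and re-serialize each optimizer's state: the round
  // trip should be lossless and the restored weights unchanged.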
  void TestCheckPoint() {
    paddle::optimizer::Tensor* p = FixedTensor(kSize);
    for (size_t i = 0; i < opts_.size(); ++i) {
      auto state = opts_[i]->SerializeState();
      opts_[i]->DeserializeState(state);
      auto state1 = opts_[i]->SerializeState();
      opts_[i]->DeserializeState(state);
      EXPECT_EQ(state, state1);

      int s = 0;
      float* newp = (float*)opts_[i]->get_weight(&s);
      EXPECT_EQ(static_cast<size_t>(s), kSize);
      for (size_t j = 0; j < kSize; ++j) {
        EXPECT_EQ(newp[j], (*p)[j]);
      }
    }
  }

private:
  std::vector<paddle::optimizer::ParameterOptimizer*> opts_;
  paddle::OptimizerConfig config_;
};

TEST_F(OptimizerTest, TestGetWeight) { TestGetWeight(); }

TEST_F(OptimizerTest, TestUpdate) { TestUpdate(); }

TEST_F(OptimizerTest, TestCheckPoint) { TestCheckPoint(); }
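
// Note: this file defines no main(); the test binary is assumed to be linked
// against gtest_main (or an equivalent RUN_ALL_TESTS() driver) by the build.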