// parameter_optimizer_test.cpp
#include "parameter_optimizer.h"
#include <cmath>
#include <tuple>
#include <vector>
#include "adadelta_optimizer.h"
#include "adagrad_optimizer.h"
#include "adam_optimizer.h"
#include "gtest/gtest.h"
#include "sgd_optimizer.h"
using namespace paddle;
using namespace paddle::optimizer;

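// Allocate a Tensor of `size` elements filled with uniform random values
// in [0, 1]. rand() is unseeded, so the values are deterministic across runs.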
Tensor* FillTensor(size_t size) {
  Tensor* param = new Tensor(size);
  Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<float>(rand()) / static_cast<float>(RAND_MAX);
  }
  return param;
}

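// Allocate a Tensor of `size` elements holding the deterministic sequence
// 0, 1, ..., size - 1, so expected values are easy to compute.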
Tensor* FixedTensor(size_t size) {
  Tensor* param = new Tensor(size);
  Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<float>(i);
  }
  return param;
}

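// Fixture that builds one ParameterOptimizer per configuration (SGD and
// Adam) and runs each check against all of them.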
class OptimizerTest : public testing::Test {
public:
  // Number of elements in each test tensor.
  const size_t size = 5;

  virtual void SetUp() {
    CreateSGD();
    CreateAdam();
  }
  virtual void TearDown() {}

  void CreateSGD() {
    config.set_optimizer(OptimizerConfig::SGD);
    config.mutable_sgd()->set_momentum(0.0);
    config.mutable_sgd()->set_decay(0.0);
    config.mutable_sgd()->set_nesterov(false);
    config.set_lr_policy(OptimizerConfig::ConstLr);
    config.mutable_const_lr()->set_learning_rate(0.1);

    ParameterOptimizer* opt =
        ParameterOptimizer::Create(config.SerializeAsString());
    opts.push_back(opt);
  }

  void CreateAdam() {
    config.set_optimizer(OptimizerConfig::Adam);
    config.mutable_adam()->set_beta_1(0.9);
    config.mutable_adam()->set_beta_2(0.1);
    config.mutable_adam()->set_epsilon(1e-3);
    config.mutable_adam()->set_decay(0.0);
    config.set_lr_policy(OptimizerConfig::ConstLr);
    config.mutable_const_lr()->set_learning_rate(0.1);
    ParameterOptimizer* opt =
        ParameterOptimizer::Create(config.SerializeAsString());
    opts.push_back(opt);
  }
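
  // Hedged sketch: adadelta_optimizer.h and adagrad_optimizer.h are included
  // above but never exercised. Helpers along these lines could register those
  // optimizers too; the proto field names (mutable_adagrad(), set_rho(), and
  // the Adagrad / Adadelta enum values) are assumptions to be checked against
  // OptimizerConfig.proto. Neither helper is called from SetUp() yet.
  void CreateAdagrad() {
    config.set_optimizer(OptimizerConfig::Adagrad);  // assumed enum value
    config.mutable_adagrad()->set_epsilon(1e-6);     // assumed field names
    config.mutable_adagrad()->set_decay(0.0);
    config.set_lr_policy(OptimizerConfig::ConstLr);
    config.mutable_const_lr()->set_learning_rate(0.1);
    opts.push_back(ParameterOptimizer::Create(config.SerializeAsString()));
  }

  void CreateAdadelta() {
    config.set_optimizer(OptimizerConfig::Adadelta);  // assumed enum value
    config.mutable_adadelta()->set_rho(0.95);         // assumed field names
    config.mutable_adadelta()->set_epsilon(1e-6);
    config.mutable_adadelta()->set_decay(0.0);
    config.set_lr_policy(OptimizerConfig::ConstLr);
    config.mutable_const_lr()->set_learning_rate(0.1);
    opts.push_back(ParameterOptimizer::Create(config.SerializeAsString()));
  }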
  void TestSetWeight() {
    Tensor* p = FillTensor(size);
    for (size_t i = 0; i < opts.size(); ++i) {
      opts[i]->set_weight(p);
    }
  }

  void TestGetWeight() {
    Tensor* p = FixedTensor(size);
    for (size_t i = 0; i < opts.size(); ++i) {
      opts[i]->set_weight(p);
    }
    for (size_t i = 0; i < opts.size(); ++i) {
      int s = 0;
      float* newp = (float*)opts[i]->get_weight(&s);
      for (size_t j = 0; j < size; ++j) {
        EXPECT_EQ(newp[j], (*p)[j]);
      }
    }
  }
  void TestUpdate() {
    Tensor* w = FixedTensor(size);
    Tensor* g = FixedTensor(size);
    for (size_t i = 0; i < opts.size(); ++i) {
      // Update() applies the gradient to the optimizer's weight, so a
      // weight must be set first; SetUp() alone does not provide one.
      opts[i]->set_weight(w);
      opts[i]->Update(g);
    }
  }
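
  // Hedged sketch of a stronger numeric check, assuming opts[0] is the SGD
  // instance from SetUp() and get_weight() returns the updated buffer (as
  // TestGetWeight() already relies on). With momentum = 0, decay = 0, and a
  // constant learning rate of 0.1, one update gives w[j] = w0[j] - 0.1 * g[j].
  void TestSGDUpdateValues() {
    Tensor* w = FixedTensor(size);
    Tensor* g = FixedTensor(size);
    opts[0]->set_weight(w);
    opts[0]->Update(g);
    int s = 0;
    float* newp = (float*)opts[0]->get_weight(&s);
    for (size_t j = 0; j < size; ++j) {
      float expected = static_cast<float>(j) - 0.1f * static_cast<float>(j);
      EXPECT_NEAR(newp[j], expected, 1e-5);
    }
  }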

private:
  std::vector<ParameterOptimizer*> opts;
  OptimizerConfig config;
};

TEST_F(OptimizerTest, TestSetGetWeight) {
  TestSetWeight();
  TestGetWeight();
}
TEST_F(OptimizerTest, TestUpdate) { TestUpdate(); }
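
// Wires up the hedged SGD numeric check sketched inside the fixture above.
TEST_F(OptimizerTest, TestSGDUpdateValues) { TestSGDUpdateValues(); }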

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}