#include "parameter_optimizer.h"
#include <cstdlib>  // rand(), RAND_MAX
#include <vector>
#include "adadelta_optimizer.h"
#include "adagrad_optimizer.h"
#include "adam_optimizer.h"
#include "gtest/gtest.h"
#include "sgd_optimizer.h"
using namespace paddle;
using namespace paddle::optimizer;

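// Builds a size-element Tensor over heap-allocated storage and fills it
// with uniform random values in [0, 1).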
Tensor* fill_n_Tensor(size_t size) {
  real* ptr = new real[size];
  Tensor* param = new Tensor(ptr, size);
  Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<real>(rand()) / RAND_MAX;
  }
  return param;
}

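// Builds a size-element Tensor filled with the deterministic sequence
// 0, 1, ..., size-1, so values can be compared exactly after a round trip.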
Tensor* fix_n_Tensor(size_t size) {
  real* ptr = new real[size];
  Tensor* param = new Tensor(ptr, size);
  Tensor& p = *param;
  for (size_t i = 0; i < p.size(); ++i) {
    p[i] = static_cast<real>(i);
  }
  return param;
}

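// Fixture that creates one SGD optimizer and one Adam optimizer from a
// serialized protobuf OptimizerConfig, then runs each test against both.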
class OptimizerTest : public testing::Test {
public:
  // number of elements in each test tensor
  const size_t size = 5;

  virtual void SetUp() {
    create_sgd();
    create_adam();
  }
  virtual void TearDown() {}

  void create_sgd() {
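    // Plain SGD: no momentum, no weight decay, no Nesterov correction.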
    config.set_optimizer(OptimizerConfig::SGD);
    config.mutable_sgd()->set_momentum(0.0);
    config.mutable_sgd()->set_decay(0.0);
    config.mutable_sgd()->set_nesterov(false);
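    // Constant learning rate of 0.1.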
    config.set_lr_policy(OptimizerConfig::ConstLr);
    config.mutable_const_lr()->set_learning_rate(0.1);

    ParameterOptimizer* opt =
        ParameterOptimizer::Create(config.SerializeAsString());
    opts.push_back(opt);
  }

  void create_adam() {
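    // Adam moment-decay and numerical-stability hyperparameters.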
    config.set_optimizer(OptimizerConfig::Adam);
    config.mutable_adam()->set_beta_1(0.9);
    config.mutable_adam()->set_beta_2(0.1);
    config.mutable_adam()->set_epsilon(1e-3);
    config.mutable_adam()->set_decay(0.0);
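    // Same constant learning-rate policy as the SGD configuration.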
    config.set_lr_policy(OptimizerConfig::ConstLr);
    config.mutable_const_lr()->set_learning_rate(0.1);
    ParameterOptimizer* opt =
        ParameterOptimizer::Create(config.SerializeAsString());
    opts.push_back(opt);
  }
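  // Hands the same randomly initialized weight tensor to every optimizer.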
  void test_set_weight() {
    Tensor* p = fill_n_Tensor(size);
    for (size_t i = 0; i < opts.size(); ++i) {
      opts[i]->set_weight(p);
    }
  }

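  // Round trip: set a deterministic weight, read it back through
  // get_weight(), and expect the values to match exactly.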
  void test_get_weight() {
    Tensor* p = fix_n_Tensor(size);
    for (size_t i = 0; i < opts.size(); ++i) {
      opts[i]->set_weight(p);
    }
    for (size_t i = 0; i < opts.size(); ++i) {
      real* newp = (real*)opts[i]->get_weight();
      for (size_t j = 0; j < size; ++j) {
        EXPECT_EQ(newp[j], (*p)[j]);
      }
    }
  }
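  // Applies a single gradient step to every optimizer; this only verifies
  // that Update() runs, not the numerical result.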
  void test_update() {
    Tensor* g = fix_n_Tensor(size);
    for (size_t i = 0; i < opts.size(); ++i) {
      opts[i]->Update(g);
    }
  }

private:
  std::vector<ParameterOptimizer*> opts;
  OptimizerConfig config;
};

TEST_F(OptimizerTest, test_set_get_weight) {
  test_set_weight();
  test_get_weight();
}
TEST_F(OptimizerTest, test_update) { test_update(); }

int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  return RUN_ALL_TESTS();
}