Unverified · Commit 31ee4fa5 authored by rical730, committed by GitHub

add gaussian sampling method with noise table (#241)

* add gaussian sampling method with noise table

* add table sampling test and agent param_size test

* add model param_size test

* get param size of paddle demo

* rename gaussian_table_sampling to cached_gaussian_sampling

* add unittest to sampling method and uniform indentation to 2 spaces
Co-authored-by: TomorrowIsAnOtherDay <2466956298@qq.com>
Parent 65554cd6
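In short, the commit lets the Gaussian sampler optionally draw from a pre-generated noise table: the table is filled once from the config seed, a sample is a contiguous window of it, and the returned key is simply the window offset, so any process holding an identically configured table can rebuild the noise from the key alone. A minimal standalone sketch of that idea (the struct and names below are illustrative, not the DeepES API):

#include <algorithm>
#include <cassert>
#include <random>
#include <vector>

// Illustrative noise table: filled once, then a "sample" is just an
// offset (the key) into the shared table.
struct NoiseTable {
  std::vector<float> cache;
  NoiseTable(int cache_size, float stdev, unsigned seed) : cache(cache_size) {
    std::default_random_engine gen(seed);
    std::normal_distribution<float> norm;
    for (auto& v : cache) v = norm(gen) * stdev;  // pre-scaled by std
  }
  // Draw a random window of `size` floats; the key is the window offset.
  int sampling(float* noise, int size, std::default_random_engine& gen) {
    std::uniform_int_distribution<int> offset(0, (int)cache.size() - size);
    int key = offset(gen);
    std::copy_n(cache.data() + key, size, noise);
    return key;
  }
  // Any holder of an identically seeded table rebuilds the same noise.
  void resampling(int key, float* noise, int size) const {
    assert(key >= 0 && key + size <= (int)cache.size());
    std::copy_n(cache.data() + key, size, noise);
  }
};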
@@ -84,13 +84,11 @@ public:
     if (!done) {
       reward = 1.0;
-    }
-    else if (steps_beyond_done == -1) {
+    } else if (steps_beyond_done == -1) {
       // Pole just fell!
       steps_beyond_done = 0;
       reward = 0;
-    }
-    else {
+    } else {
       if (steps_beyond_done == 0) {
         assert(false); // Can't do this
       }
......
 seed: 1024
 gaussian_sampling {
   std: 0.5
+  cached: true
+  cache_size: 100000
 }
 optimizer {
   type: "Adam"
......
@@ -12,11 +12,14 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include <map>
-#include "optimizer.h"
 #ifndef ADAM_OPTIMIZER_H
 #define ADAM_OPTIMIZER_H
+#include <map>
+#include <cmath>
+#include "optimizer.h"
 namespace DeepES{
 /*@brief AdamOptimizer.
......
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#ifndef CACHED_GAUSSIAN_SAMPLING_H
#define CACHED_GAUSSIAN_SAMPLING_H
#include <random>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
#include "sampling_method.h"
#include "utils.h"
#include <glog/logging.h>
namespace DeepES{
class CachedGaussianSampling: public SamplingMethod {
public:
CachedGaussianSampling();
~CachedGaussianSampling();
  /*Initialize the sampling algorithm given the config with the protobuf format.
   *DeepES library uses only one configuration file for all sampling algorithms.
   A default configuration file can be found at: . // TODO: where?
   Usually you won't have to modify the configuration items of other algorithms
   if you are not using them.
   */
bool load_config(const DeepESConfig& config);
  /*@brief generate Gaussian noise and the related key.
   *
   *@Args:
   *  key: a unique key associated with the sampled noise.
   *  noise: a pointer to the memory that stores the noise.
   *  size: the number of floats to be sampled.
   *
   *@return:
   *  success: whether the Gaussian noise was generated successfully.
   */
bool sampling(int* key, float* noise, int64_t size);
  /*@brief reconstruct the Gaussian noise given the key.
   * This function is often used for updating the neural network parameters in the offline environment.
   *
   *@Args:
   *  key: a unique key associated with the sampled noise.
   *  noise: a pointer to the memory that stores the noise.
   *  size: the number of floats to be sampled.
   *
   *@return:
   *  success: whether the noise was reconstructed successfully.
   */
bool resampling(int key, float* noise, int64_t size);
private:
float _std;
int _cache_size;
float* _noise_cache = nullptr;
bool _create_noise_cache();
};
}
#endif
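A sketch of the intended round trip through this interface, assuming `config` is a DeepESConfig parsed from a prototxt with cached: true and `param_size` is the model's parameter count:

CachedGaussianSampling sampler;
CHECK(sampler.load_config(config));  // builds the noise table from config.seed()
int key = 0;
std::vector<float> noise(param_size);
CHECK(sampler.sampling(&key, noise.data(), param_size));  // copy a random window, record its offset
std::vector<float> same(param_size);
CHECK(sampler.resampling(key, same.data(), param_size));  // reproduces the identical floats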
@@ -12,9 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 //
-#ifndef _GAUSSIAN_SAMPLING_H
-#define _GAUSSIAN_SAMPLING_H
+#ifndef GAUSSIAN_SAMPLING_H
+#define GAUSSIAN_SAMPLING_H
+#include <random>
+#include <stdio.h>
+#include <stdlib.h>
+#include <time.h>
 #include "sampling_method.h"
+#include "utils.h"
 namespace DeepES{
@@ -24,24 +30,26 @@ public:
   GaussianSampling() {}
   ~GaussianSampling() {}
   /*Initialize the sampling algorithm given the config with the protobuf format.
-   *DeepES library uses only one configuration file for all sampling algorithms. A default
-   configuration file can be found at: . Usually you won't have to modify the configuration items of other algorithms
+   *DeepES library uses only one configuration file for all sampling algorithms.
+   A default configuration file can be found at: . // TODO: where?
+   Usually you won't have to modify the configuration items of other algorithms
    if you are not using them.
    */
-  void load_config(const DeepESConfig& config);
+  bool load_config(const DeepESConfig& config);
-  /*@brief add Gaussian noise to the parameter.
+  /*@brief generate Gaussian noise and the related key.
    *
    *@Args:
-   *  param: a pointer to the memory of the parameter.
-   *  size: the number of floats of the parameter.
-   *  noisy_param: a pointer to the updated parameter.
+   *  key: a unique key associated with the sampled noise.
+   *  noise: a pointer to the memory that stores the noise.
+   *  size: the number of floats to be sampled.
    *
    *@return:
-   *  success: load configuration successfully or not.
+   *  success: whether the Gaussian noise was generated successfully.
    */
-  int sampling(float* noise, int64_t size);
+  bool sampling(int* key, float* noise, int64_t size);
   /*@brief reconstruct the Gaussian noise given the key.
    * This function is often used for updating the neural network parameters in the offline environment.
@@ -50,6 +58,9 @@ public:
    *  key: a unique key associated with the sampled noise.
    *  noise: a pointer to the memory that stores the noise.
    *  size: the number of floats to be sampled.
+   *
+   *@return:
+   *  success: whether the noise was reconstructed successfully.
    */
  bool resampling(int key, float* noise, int64_t size);
......
@@ -12,11 +12,13 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
+#ifndef OPTIMIZER_H
+#define OPTIMIZER_H
 #include <map>
 #include <glog/logging.h>
-#ifndef OPTIMIZER_H
-#define OPTIMIZER_H
 namespace DeepES{
 /*@brief Optimizer. Base class for optimizers.
......
@@ -21,6 +21,7 @@
 #include "sgd_optimizer.h"
 #include "adam_optimizer.h"
 #include "deepes.pb.h"
+#include <glog/logging.h>
 namespace DeepES{
 /* @brief: create an optimizer according to the configuration.
......
@@ -17,8 +17,8 @@
 #include "paddle_api.h"
 #include "optimizer_factory.h"
+#include "sampling_factory.h"
 #include "utils.h"
-#include "gaussian_sampling.h"
 #include "deepes.pb.h"
 #include <vector>
@@ -74,6 +74,11 @@ class ESAgent {
    */
   std::shared_ptr<PaddlePredictor> get_predictor();
+  // get param size of model
+  int64_t param_size() {
+    return _param_size;
+  }
 protected:
......
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef SAMPLING_FACTORY_H
#define SAMPLING_FACTORY_H
#include <algorithm>
#include <memory>
#include "sampling_method.h"
#include "gaussian_sampling.h"
#include "cached_gaussian_sampling.h"
#include "deepes.pb.h"
#include <glog/logging.h>
namespace DeepES{
/* @brief: create a sampling_method according to the configuration.
 * @args:
 *   config: configuration for the DeepES
 *
 */
std::shared_ptr<SamplingMethod> create_sampling_method(const DeepESConfig& config);
}//namespace
#endif
@@ -12,8 +12,8 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#ifndef _SAMPLING_METHOD_H
-#define _SAMPLING_METHOD_H
+#ifndef SAMPLING_METHOD_H
+#define SAMPLING_METHOD_H
 #include <string>
 #include <random>
@@ -39,23 +39,24 @@ public:
   virtual ~SamplingMethod() {}
   /*Initialize the sampling algorithm given the config with the protobuf format.
-   *DeepES library uses only one configuration file for all sampling algorithms. A default
-   configuration file can be found at: . Usually you won't have to modify the configuration items of other algorithms
+   *DeepES library uses only one configuration file for all sampling algorithms.
+   A default configuration file can be found at: . // TODO: where?
+   Usually you won't have to modify the configuration items of other algorithms
    if you are not using them.
    */
-  virtual void load_config(const DeepESConfig& config)=0;
+  virtual bool load_config(const DeepESConfig& config)=0;
-  /*@brief add Gaussian noise to the parameter.
+  /*@brief generate Gaussian noise and the related key.
    *
    *@Args:
-   *  param: a pointer to the memory of the parameter.
-   *  size: the number of floats of the parameter.
-   *  noisy_param: a pointer to the updated parameter.
+   *  key: a unique key associated with the sampled noise.
+   *  noise: a pointer to the memory that stores the noise.
+   *  size: the number of floats to be sampled.
    *
    *@return:
-   *  success: load configuration successfully or not.
+   *  success: whether the Gaussian noise was generated successfully.
    */
-  virtual int sampling(float* noise, int64_t size)=0;
+  virtual bool sampling(int* key, float* noise, int64_t size)=0;
   /*@brief reconstruct the Gaussian noise given the key.
    * This function is often used for updating the neural network parameters in the offline environment.
@@ -64,6 +65,9 @@ public:
    *  key: a unique key associated with the sampled noise.
    *  noise: a pointer to the memory that stores the noise.
    *  size: the number of floats to be sampled.
+   *
+   *@return:
+   *  success: whether the noise was reconstructed successfully.
    */
  virtual bool resampling(int key, float* noise, int64_t size)=0;
......
@@ -12,11 +12,13 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include <map>
-#include "optimizer.h"
 #ifndef SGD_OPTIMIZER_H
 #define SGD_OPTIMIZER_H
+#include <map>
+#include <cmath>
+#include "optimizer.h"
 namespace DeepES{
 /*@brief SGDOptimizer.
......
@@ -18,8 +18,8 @@
 #include <memory>
 #include <string>
 #include "optimizer_factory.h"
+#include "sampling_factory.h"
 #include "utils.h"
-#include "gaussian_sampling.h"
 #include "deepes.pb.h"
 namespace DeepES{
@@ -47,8 +47,7 @@ public:
     _is_sampling_agent = false;
     _config = std::make_shared<DeepESConfig>();
     load_proto_conf(config_path, *_config);
-    _sampling_method = std::make_shared<GaussianSampling>();
-    _sampling_method->load_config(*_config);
+    _sampling_method = create_sampling_method(*_config);
     _optimizer = create_optimizer(_config->optimizer());
     // Origin agent can't be used to sample, so keep it the same as _model for evaluating.
     _sampling_model = model;
@@ -111,6 +110,7 @@ public:
       int key = noisy_info[i].key(0);
       float reward = noisy_rewards[i];
       bool success = _sampling_method->resampling(key, _noise, _param_size);
+      CHECK(success) << "[DeepES] resampling error occurs at sample: " << i;
       for (int64_t j = 0; j < _param_size; ++j) {
         _neg_gradients[j] += _noise[j] * reward;
       }
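For context, the accumulation `_neg_gradients[j] += _noise[j] * reward` above is the usual evolution-strategies gradient estimate. With noise vectors \epsilon_i reconstructed from their keys and scalar rewards r_i, the update direction is

\nabla_\theta J(\theta) \approx \frac{1}{n \sigma} \sum_{i=1}^{n} r_i \epsilon_i

where the normalization by the sample count n and noise std \sigma is presumably applied elsewhere in update (this hunk does not show it). The practical point: the learner only needs integer keys and scalar rewards from the workers to rebuild the full gradient.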
@@ -134,14 +134,18 @@ public:
   // copied parameters = original parameters + noise
   bool add_noise(SamplingInfo& sampling_info) {
+    bool success = true;
     if (!_is_sampling_agent) {
       LOG(ERROR) << "[DeepES] Original ESAgent cannot call add_noise function, please use cloned ESAgent.";
-      return false;
+      success = false;
+      return success;
     }
     auto sampling_params = _sampling_model->named_parameters();
     auto params = _model->named_parameters();
-    int key = _sampling_method->sampling(_noise, _param_size);
+    int key = 0;
+    success = _sampling_method->sampling(&key, _noise, _param_size);
+    CHECK(success) << "[DeepES] sampling error occurs while add_noise.";
     sampling_info.add_key(key);
     int64_t counter = 0;
     for (auto& param: sampling_params) {
@@ -155,9 +159,13 @@ public:
       }
       counter += tensor.size(0);
     }
-    return true;
+    return success;
   }
+  // get param size of model
+  int64_t param_size() {
+    return _param_size;
+  }
 private:
......
@@ -14,6 +14,7 @@
 #ifndef UTILS_H
 #define UTILS_H
 #include <string>
 #include <fstream>
 #include <algorithm>
......
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include <cmath>
 #include "adam_optimizer.h"
 namespace DeepES {
......
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "cached_gaussian_sampling.h"
namespace DeepES{
CachedGaussianSampling::CachedGaussianSampling() {}
CachedGaussianSampling::~CachedGaussianSampling() {
delete[] _noise_cache;
}
bool CachedGaussianSampling::load_config(const DeepESConfig& config) {
bool success = true;
_std = config.gaussian_sampling().std();
success = set_seed(config.seed());
CHECK(success) << "[DeepES] Fail to set seed while load config.";
_cache_size = config.gaussian_sampling().cache_size();
_noise_cache = new float [_cache_size];
memset(_noise_cache, 0, _cache_size * sizeof(float));
success = _create_noise_cache();
CHECK(success) << "[DeepES] Fail to create noise_cache while load config.";
return success;
}
bool CachedGaussianSampling::sampling(int* key, float* noise, int64_t size) {
bool success = true;
if (_noise_cache == nullptr) {
LOG(ERROR) << "[DeepES] Please use load_config() first.";
success = false;
return success;
}
if (noise == nullptr) {
LOG(ERROR) << "[DeepES] Input noise array cannot be nullptr.";
success = false;
return success;
}
if ((size >= _cache_size) || (size < 0)) {
LOG(ERROR) << "[DeepES] Input size " << size << " is out of bounds [0, " << _cache_size << "), cache_size: " << _cache_size;
success = false;
return success;
}
int rand_key = rand();
std::default_random_engine generator(rand_key);
std::uniform_int_distribution<unsigned int> uniform(0, _cache_size - size);
int index = uniform(generator);
*key = index;
for (int64_t i = 0; i < size; ++i) {
*(noise + i) = *(_noise_cache + index + i);
}
return success;
}
bool CachedGaussianSampling::resampling(int key, float* noise, int64_t size) {
bool success = true;
if (_noise_cache == nullptr) {
LOG(ERROR) << "[DeepES] Please use load_config() first.";
success = false;
return success;
}
if (noise == nullptr) {
LOG(ERROR) << "[DeepES] Input noise array cannot be nullptr.";
success = false;
return success;
}
if ((size >= _cache_size) || (size < 0)) {
LOG(ERROR) << "[DeepES] Input size " << size << " is out of bounds [0, " << _cache_size << "), cache_size: " << _cache_size;
success = false;
return success;
}
if ((key > _cache_size - size) || (key < 0)) {
LOG(ERROR) << "[DeepES] Resampling key " << key << " is out of bounds [0, " << _cache_size - size << "], cache_size: " << _cache_size << ", size: " << size;
success = false;
return success;
}
for (int64_t i = 0; i < size; ++i) {
*(noise + i) = *(_noise_cache + key + i);
}
return success;
}
bool CachedGaussianSampling::_create_noise_cache() {
std::default_random_engine generator(_seed);
std::normal_distribution<float> norm;
for (int64_t i = 0; i < _cache_size; ++i) {
*(_noise_cache + i) = norm(generator) * _std;
}
return true;
}
}
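A worked example of the invariants these checks enforce: with the demo config's cache_size = 100000 and a model of param_size = 81, sampling draws an offset uniformly from [0, 99919] and resampling accepts exactly the keys in that range, so a key of 99920 or a size of 100000 is rejected. Because the table is generated once from config.seed(), every process that loads the same prototxt holds a bit-identical table, which is what lets a worker ship a single int key instead of 81 floats of noise.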
@@ -12,36 +12,40 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include <random>
-#include <stdio.h>
-#include <stdlib.h>
-#include <time.h>
 #include "gaussian_sampling.h"
-#include "utils.h"
 namespace DeepES{
-void GaussianSampling::load_config(const DeepESConfig& config) {
+bool GaussianSampling::load_config(const DeepESConfig& config) {
+  bool success = true;
   _std = config.gaussian_sampling().std();
-  set_seed(config.seed());
+  success = set_seed(config.seed());
+  return success;
 }
-int GaussianSampling::sampling(float* noise, int64_t size) {
-  int key = rand();
-  std::default_random_engine generator(key);
+bool GaussianSampling::sampling(int* key, float* noise, int64_t size) {
+  bool success = true;
+  if (noise == nullptr) {
+    LOG(ERROR) << "[DeepES] Input noise array cannot be nullptr.";
+    success = false;
+    return success;
+  }
+  int rand_key = rand();
+  *key = rand_key;
+  std::default_random_engine generator(rand_key);
   std::normal_distribution<float> norm;
   for (int64_t i = 0; i < size; ++i) {
     *(noise + i) = norm(generator) * _std;
   }
-  return key;
+  return success;
 }
 bool GaussianSampling::resampling(int key, float* noise, int64_t size) {
   bool success = true;
   if (noise == nullptr) {
+    LOG(ERROR) << "[DeepES] Input noise array cannot be nullptr.";
     success = false;
-  }
-  else {
+  } else {
     std::default_random_engine generator(key);
     std::normal_distribution<float> norm;
     for (int64_t i = 0; i < size; ++i) {
......
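The non-cached path above works because the key doubles as the RNG seed: seeding std::default_random_engine with the same key replays the identical normal sequence, which is exactly what resampling relies on. A self-contained illustration of that guarantee (note the two separate distribution objects, since std::normal_distribution carries internal state):

#include <cassert>
#include <random>

int main() {
  std::default_random_engine g1(42);  // 42 plays the role of the key
  std::default_random_engine g2(42);
  std::normal_distribution<float> norm1;
  std::normal_distribution<float> norm2;
  for (int i = 0; i < 8; ++i) {
    assert(norm1(g1) == norm2(g2));  // same seed, same noise sequence
  }
  return 0;
}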
@@ -24,13 +24,13 @@ std::shared_ptr<Optimizer> create_optimizer(const OptimizerConfig& optimizer_con
   if (opt_type == "sgd") {
     optimizer = std::make_shared<SGDOptimizer>(optimizer_config.base_lr(), \
                                                optimizer_config.momentum());
-  }else if (opt_type == "adam") {
+  } else if (opt_type == "adam") {
     optimizer = std::make_shared<AdamOptimizer>(optimizer_config.base_lr(), \
                                                 optimizer_config.beta1(), \
                                                 optimizer_config.beta2(), \
                                                 optimizer_config.epsilon());
-  }else {
-    // TODO: NotImplementedError
+  } else {
+    LOG(ERROR) << "type of OptimizerConfig must be SGD or Adam."; // NotImplementedError
   }
   return optimizer;
 }
......
@@ -52,8 +52,7 @@ ESAgent::ESAgent(const std::string& model_dir, const std::string& config_path) {
   _config = std::make_shared<DeepESConfig>();
   load_proto_conf(config_path, *_config);
-  _sampling_method = std::make_shared<GaussianSampling>();
-  _sampling_method->load_config(*_config);
+  _sampling_method = create_sampling_method(*_config);
   _optimizer = create_optimizer(_config->optimizer());
@@ -101,6 +100,7 @@ bool ESAgent::update(
     int key = noisy_info[i].key(0);
     float reward = noisy_rewards[i];
     bool success = _sampling_method->resampling(key, _noise, _param_size);
+    CHECK(success) << "[DeepES] resampling error occurs at sample: " << i;
     for (int64_t j = 0; j < _param_size; ++j) {
       _neg_gradients[j] += _noise[j] * reward;
     }
@@ -123,12 +123,16 @@ bool ESAgent::update(
 }
 bool ESAgent::add_noise(SamplingInfo& sampling_info) {
+  bool success = true;
   if (!_is_sampling_agent) {
     LOG(ERROR) << "[DeepES] Original ESAgent cannot call add_noise function, please use cloned ESAgent.";
-    return false;
+    success = false;
+    return success;
   }
-  int key = _sampling_method->sampling(_noise, _param_size);
+  int key = 0;
+  success = _sampling_method->sampling(&key, _noise, _param_size);
+  CHECK(success) << "[DeepES] sampling error occurs while add_noise.";
   int model_iter_id = _config->async_es().model_iter_id();
   sampling_info.add_key(key);
   sampling_info.set_model_iter_id(model_iter_id);
@@ -144,7 +148,7 @@ bool ESAgent::add_noise(SamplingInfo& sampling_info) {
     counter += tensor_size;
   }
-  return true;
+  return success;
 }
 std::shared_ptr<PaddlePredictor> ESAgent::get_predictor() {
......
@@ -29,6 +29,8 @@ message DeepESConfig {
 message GaussianSamplingConfig {
   optional float std = 1 [default = 1.0];
+  optional bool cached = 2 [default = false];
+  optional int32 cache_size = 3 [default = 100000];
 }
 message OptimizerConfig{
......
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "sampling_factory.h"
namespace DeepES{
std::shared_ptr<SamplingMethod> create_sampling_method(const DeepESConfig& config) {
std::shared_ptr<SamplingMethod> sampling_method;
bool cached = config.gaussian_sampling().cached();
if (cached) {
sampling_method = std::make_shared<CachedGaussianSampling>();
} else {
sampling_method = std::make_shared<GaussianSampling>();
}
bool success = sampling_method->load_config(config);
  if (success) {
return sampling_method;
} else {
LOG(ERROR) << "[DeepES] Fail to create sampling_method";
return nullptr;
}
}
}//namespace
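A sketch of driving the factory, assuming a config prototxt like the ones added in this commit (the path below is illustrative):

DeepESConfig config;
load_proto_conf("./deepes_config.prototxt", config);  // helper declared in utils.h
std::shared_ptr<SamplingMethod> sampler = create_sampling_method(config);
CHECK(sampler != nullptr) << "[DeepES] invalid sampling configuration";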
@@ -12,7 +12,6 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-#include <cmath>
 #include "sgd_optimizer.h"
 namespace DeepES {
......
@@ -21,15 +21,14 @@ struct Model : public torch::nn::Module{
   Model() = delete;
-  Model(const int obs_dim, const int act_dim) {
+  Model(const int obs_dim, const int act_dim, const int h1_size, const int h2_size) {
     _obs_dim = obs_dim;
     _act_dim = act_dim;
-    int hid1_size = 30;
-    int hid2_size = 15;
-    fc1 = register_module("fc1", torch::nn::Linear(obs_dim, hid1_size));
-    fc2 = register_module("fc2", torch::nn::Linear(hid1_size, hid2_size));
-    fc3 = register_module("fc3", torch::nn::Linear(hid2_size, act_dim));
+    _h1_size = h1_size;
+    _h2_size = h2_size;
+    fc1 = register_module("fc1", torch::nn::Linear(obs_dim, h1_size));
+    fc2 = register_module("fc2", torch::nn::Linear(h1_size, h2_size));
+    fc3 = register_module("fc3", torch::nn::Linear(h2_size, act_dim));
   }
   torch::Tensor forward(torch::Tensor x) {
@@ -41,7 +40,7 @@ struct Model : public torch::nn::Module{
   }
   std::shared_ptr<Model> clone() {
-    std::shared_ptr<Model> model = std::make_shared<Model>(_obs_dim, _act_dim);
+    std::shared_ptr<Model> model = std::make_shared<Model>(_obs_dim, _act_dim, _h1_size, _h2_size);
     std::vector<torch::Tensor> parameters1 = parameters();
     std::vector<torch::Tensor> parameters2 = model->parameters();
     for (int i = 0; i < parameters1.size(); ++i) {
@@ -58,6 +57,8 @@ struct Model : public torch::nn::Module{
   int _act_dim;
   int _obs_dim;
+  int _h1_size;
+  int _h2_size;
   torch::nn::Linear fc1{nullptr}, fc2{nullptr}, fc3{nullptr};
 };
......
seed: 1024
gaussian_sampling {
  std: 0.005
  cached: true
  cache_size: 100000
}
optimizer {
type: "Adam",
base_lr: 0.005,
momentum: 0.9,
beta1: 0.9,
beta2: 0.999,
epsilon: 1e-8,
}
@@ -2,6 +2,7 @@ seed : 1024
 gaussian_sampling {
   std: 0.005
+  cached: false
 }
 optimizer {
......
@@ -30,12 +30,12 @@ TEST(SGDOptimizersTest, Method_update) {
   float sgd_grad[10] = {-0.11992419,-0.0 , 0.07681337,-0.06616384, 0.00249889, 0.01158612,-0.3067452 , 0.36048946,-0.15820622,-0.20014143};
   float sgd_new[10] = { 0.01199242, 0.0 , 0.0344831 , 0.05776198, 0.04206595, 0.00973154, 0.09637211,-0.03477474, 0.014892306, 0.03129495};
-  EXPECT_TRUE(optimizer->update(sgd_wei, sgd_grad, 10, "test"));
+  EXPECT_TRUE(optimizer->update(sgd_wei, sgd_grad, 10, "fc1"));
   for (int i = 0; i < 10; ++i) {
     EXPECT_FLOAT_EQ(sgd_new[i], sgd_wei[i]) << " i: " << i;
   }
-  EXPECT_TRUE(optimizer->update(sgd_wei, sgd_grad, 10, "test"));
-  EXPECT_FALSE(optimizer->update(sgd_wei, sgd_grad, 9, "test"));
+  EXPECT_TRUE(optimizer->update(sgd_wei, sgd_grad, 10, "fc1"));
+  EXPECT_FALSE(optimizer->update(sgd_wei, sgd_grad, 9, "fc1"));
 }
 TEST(AdamOptimizersTest, Method_update) {
@@ -48,12 +48,12 @@ TEST(AdamOptimizersTest, Method_update) {
   float adam_grad[10] = {-0.11992419,-0.0 , 0.07681337,-0.06616384, 0.00249889, 0.01158612,-0.3067452 , 0.36048946,-0.15820622,-0.20014143};
   float adam_new[10] = { 0.99999736, 0. ,-0.95783144, 1.05114082,-0.95755763,-0.98908256, 1.06569656,-0.99872491, 0.99906968, 1.01127923};
-  EXPECT_TRUE(optimizer->update(adam_wei, adam_grad, 10, "test"));
+  EXPECT_TRUE(optimizer->update(adam_wei, adam_grad, 10, "fc1"));
   for (int i = 0; i < 10; ++i) {
     EXPECT_FLOAT_EQ(adam_new[i], adam_wei[i]) << " i: " << i;
   }
-  EXPECT_TRUE(optimizer->update(adam_wei, adam_grad, 10, "test"));
-  EXPECT_FALSE(optimizer->update(adam_wei, adam_grad, 9, "test"));
+  EXPECT_TRUE(optimizer->update(adam_wei, adam_grad, 10, "fc1"));
+  EXPECT_FALSE(optimizer->update(adam_wei, adam_grad, 9, "fc1"));
 }
 } // namespace
......
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "gtest/gtest.h"
#include <vector>
#include "sampling_method.h"
#include "gaussian_sampling.h"
#include "cached_gaussian_sampling.h"
#include <memory>
namespace DeepES {
class SamplingTest : public ::testing::Test {
protected:
void init_sampling_method(bool cached) {
config = std::make_shared<DeepESConfig>();
config->set_seed(1024);
auto sampling_config = config->mutable_gaussian_sampling();
sampling_config->set_std(1.0);
sampling_config->set_cached(cached);
sampling_config->set_cache_size(cache_size);
if (cached) {
sampler = std::make_shared<CachedGaussianSampling>();
} else {
sampler = std::make_shared<GaussianSampling>();
}
}
std::shared_ptr<SamplingMethod> sampler;
std::shared_ptr<DeepESConfig> config;
float array[3] = {1.0, 2.0, 3.0};
  int cache_size = 100;  // table size used by the cached sampler in these tests
int key = 0;
};
TEST_F(SamplingTest, GaussianSampling_load_config) {
init_sampling_method(false);
EXPECT_TRUE(sampler->load_config(*config));
}
TEST_F(SamplingTest, GaussianSampling_sampling) {
init_sampling_method(false);
sampler->load_config(*config);
EXPECT_FALSE(sampler->sampling(&key, nullptr, 0));
EXPECT_TRUE(sampler->sampling(&key, array, 3));
}
TEST_F(SamplingTest, GaussianSampling_resampling) {
init_sampling_method(false);
sampler->load_config(*config);
EXPECT_FALSE(sampler->resampling(0, nullptr, 0));
EXPECT_TRUE(sampler->resampling(0, array, 3));
}
TEST_F(SamplingTest, CachedGaussianSampling_load_config) {
init_sampling_method(true);
EXPECT_TRUE(sampler->load_config(*config));
}
TEST_F(SamplingTest, CachedGaussianSampling_sampling) {
init_sampling_method(true);
EXPECT_FALSE(sampler->sampling(&key, array, 0));
sampler->load_config(*config);
EXPECT_FALSE(sampler->sampling(&key, nullptr, 0));
EXPECT_FALSE(sampler->sampling(&key, array, -1));
EXPECT_FALSE(sampler->sampling(&key, array, cache_size));
EXPECT_TRUE(sampler->sampling(&key, array, 0));
EXPECT_TRUE(sampler->sampling(&key, array, 3));
}
TEST_F(SamplingTest, CachedGaussianSampling_resampling) {
init_sampling_method(true);
EXPECT_FALSE(sampler->resampling(0, array, 0));
sampler->load_config(*config);
EXPECT_FALSE(sampler->resampling(0, nullptr, 0));
EXPECT_FALSE(sampler->resampling(0, array, -1));
EXPECT_FALSE(sampler->resampling(0, array, cache_size));
EXPECT_TRUE(sampler->resampling(0, array, 0));
EXPECT_TRUE(sampler->resampling(0, array, 1));
EXPECT_TRUE(sampler->resampling(0, array, 2));
EXPECT_FALSE(sampler->resampling(-1, array, 3));
EXPECT_TRUE(sampler->resampling(0, array, 3));
EXPECT_TRUE(sampler->resampling(1, array, 3));
EXPECT_TRUE(sampler->resampling(2, array, 3));
EXPECT_TRUE(sampler->resampling(cache_size-3, array, 3));
EXPECT_FALSE(sampler->resampling(cache_size-2, array, 3));
EXPECT_FALSE(sampler->resampling(cache_size-1, array, 3));
EXPECT_FALSE(sampler->resampling(cache_size, array, 3));
EXPECT_FALSE(sampler->resampling(cache_size-3, array, cache_size-1));
}
} // namespace
@@ -57,19 +57,23 @@ protected:
   float test_lo = test_loss();
   if (train_lo > test_lo) {
     return train_lo - test_lo;
-  }
-  else {
+  } else {
     return test_lo - train_lo;
   }
 }
-void SetUp() override {
+void init_agent(const int in_dim, const int out_dim, const int h1_size, const int h2_size) {
+  std::shared_ptr<Model> model = std::make_shared<Model>(in_dim, out_dim, h1_size, h2_size);
+  agent = std::make_shared<ESAgent<Model>>(model, "../test/prototxt/torch_sin_config.prototxt");
+}
+void train_agent(std::string config_path) {
   std::default_random_engine generator(0); // fix seed
   std::uniform_real_distribution<float> uniform(-3.0, 9.0);
   std::normal_distribution<float> norm;
   for (int i = 0; i < train_data_size; ++i) {
     float x_i = uniform(generator); // generate data between [-3, 9]
-    float y_i = sin(x_i) + norm(generator)*0.05; // noise std 0.05
+    float y_i = sin(x_i) + norm(generator) * 0.05; // label noise std 0.05
     x_list.push_back(x_i);
     y_list.push_back(y_i);
   }
@@ -80,8 +84,8 @@ protected:
     test_y_list.push_back(y_i);
   }
-  std::shared_ptr<Model> model = std::make_shared<Model>(1, 1);
-  agent = std::make_shared<ESAgent<Model>>(model, "../test/torch_sin_config.prototxt");
+  std::shared_ptr<Model> model = std::make_shared<Model>(1, 1, 10, 5);
+  agent = std::make_shared<ESAgent<Model>>(model, config_path);
   // Clone agents to sample (explore).
   std::vector<std::shared_ptr<ESAgent<Model>>> sampling_agents;
@@ -125,12 +129,29 @@ protected:
   std::shared_ptr<ESAgent<Model>> agent;
 };
+TEST_F(TorchDemoTest, TrainingEffectUseNormalSampling) {
+  train_agent("../test/prototxt/torch_sin_config.prototxt");
+  EXPECT_LT(train_loss(), 0.05);
+  EXPECT_LT(test_loss(), 0.05);
+  EXPECT_LT(train_test_gap(), 0.03);
+}
-TEST_F(TorchDemoTest, TrainingEffectTest) {
+TEST_F(TorchDemoTest, TrainingEffectTestUseTableSampling) {
+  train_agent("../test/prototxt/torch_sin_cached_config.prototxt");
   EXPECT_LT(train_loss(), 0.05);
   EXPECT_LT(test_loss(), 0.05);
   EXPECT_LT(train_test_gap(), 0.03);
 }
+TEST_F(TorchDemoTest, ParamSizeTest) {
+  init_agent(1, 1, 10, 5);
+  EXPECT_EQ(agent->param_size(), 81);
+  init_agent(2, 3, 10, 5);
+  EXPECT_EQ(agent->param_size(), 103);
+  init_agent(1, 1, 1, 1);
+  EXPECT_EQ(agent->param_size(), 6);
+  init_agent(100, 2, 256, 64);
+  EXPECT_EQ(agent->param_size(), 42434);
+}
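The expected values follow from counting the weights and biases of the three Linear layers: param_size = (in + 1) * h1 + (h1 + 1) * h2 + (h2 + 1) * out. A compile-time restatement of the four expectations above (the helper name is illustrative):

#include <cstdint>

// Weights + biases of a 3-layer MLP (matches the Model in this demo).
constexpr int64_t mlp_param_size(int64_t in, int64_t out, int64_t h1, int64_t h2) {
  return (in + 1) * h1 + (h1 + 1) * h2 + (h2 + 1) * out;
}
static_assert(mlp_param_size(1, 1, 10, 5) == 81, "20 + 55 + 6");
static_assert(mlp_param_size(2, 3, 10, 5) == 103, "30 + 55 + 18");
static_assert(mlp_param_size(1, 1, 1, 1) == 6, "2 + 2 + 2");
static_assert(mlp_param_size(100, 2, 256, 64) == 42434, "25856 + 16448 + 130");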
} // namespace