未验证 提交 15c68e58 编写于 作者: R rical730 提交者: GitHub

Add UnitTest for DeepES (#231)

* Add UnitTest for DeepES

* update Dockerfile

* only unittest

* remove the commented lines and empty functions.

* update CMakefile

* add optimizer unittest and torch demo test

* use optimizer_factory and add fail unittest case

* remove duplicate files, and simplify unittest

* teamcity test

* delete test case

* remove comment line

* shield progress display of downloading and unzip libtorch

* remove deepes torch demo test

* delete useless comment
上级 f46ad361
......@@ -18,3 +18,11 @@
FROM parl/parl-test:cuda9.0-cudnn7-v2
COPY ./requirements.txt /root/
RUN apt-get install -y libgflags-dev libgoogle-glog-dev libomp-dev unzip
RUN apt-get install -y libgtest-dev && cd /usr/src/gtest && mkdir build \
&& cd build && cmake .. && make && cp libgtest*.a /usr/local/lib
RUN wget https://github.com/google/protobuf/releases/download/v2.4.1/protobuf-2.4.1.tar.gz \
&& tar -zxvf protobuf-2.4.1.tar.gz \
&& cd protobuf-2.4.1 && ./configure && make && make install
......@@ -134,6 +134,19 @@ EOF
rm -rf ${REPO_ROOT}/build
}
function run_deepes_test {
cd ${REPO_ROOT}/deepes
cat <<EOF
========================================
Running DeepES test...
========================================
EOF
sh test/run_test.sh
rm -rf ${REPO_ROOT}/deepes/build
rm -rf ${REPO_ROOT}/deepes/libtorch
}
function main() {
set -e
local CMD=$1
......@@ -158,7 +171,7 @@ function main() {
echo ========================================
pip install .
if [ \( $env == "py27" -o $env == "py36" -o $env == "py37" \) ]
then
then
pip install -r .teamcity/requirements.txt
run_test_with_cpu $env
run_test_with_cpu $env "DIS_TESTING_SERIALLY"
......@@ -176,6 +189,7 @@ function main() {
/root/miniconda3/envs/empty_env/bin/pip install .
run_import_test
run_docs_test
run_deepes_test
;;
*)
print_usage
......
......@@ -63,7 +63,7 @@ int main(int argc, char* argv[]) {
std::vector<float> noisy_rewards(ITER, 0.0f);
noisy_keys.resize(ITER);
for (int epoch = 0; epoch < 1000; ++epoch) {
for (int epoch = 0; epoch < 100; ++epoch) {
#pragma omp parallel for schedule(dynamic, 1)
for (int i = 0; i < ITER; ++i) {
auto sampling_agent = sampling_agents[i];
......
......@@ -27,7 +27,7 @@ namespace DeepES{
Args:
reward: an array of rewards
*/
void compute_centered_ranks(std::vector<float> &reward) ;
bool compute_centered_ranks(std::vector<float> &reward);
/* Load a protobuf-based configuration from the file.
* Args:
......
......@@ -23,8 +23,11 @@ elif [ $1 = "torch" ]; then
#---------------libtorch-------------#
if [ ! -d "./libtorch" ];then
echo "Cannot find the torch library: ./libtorch"
echo "Please put the torch libraray to current folder according the instruction in README"
exit 1
echo "Downloading Torch library"
wget -q https://download.pytorch.org/libtorch/cpu/libtorch-cxx11-abi-shared-with-deps-1.4.0%2Bcpu.zip
unzip -q libtorch-cxx11-abi-shared-with-deps-1.4.0+cpu.zip
rm -rf libtorch-cxx11-abi-shared-with-deps-1.4.0+cpu.zip
echo "Torch library Downloaded"
fi
FLAGS=" -DWITH_TORCH=ON"
else
......@@ -32,7 +35,7 @@ else
exit 0
fi
#export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH
#----------------protobuf-------------#
cp ./src/proto/deepes.proto ./
......
......@@ -16,6 +16,7 @@
namespace DeepES{
std::shared_ptr<Optimizer> create_optimizer(const OptimizerConfig& optimizer_config) {
std::shared_ptr<Optimizer> optimizer;
std::string opt_type = optimizer_config.type();
......
......@@ -16,7 +16,7 @@
namespace DeepES {
void compute_centered_ranks(std::vector<float> &reward) {
bool compute_centered_ranks(std::vector<float> &reward) {
std::vector<std::pair<float, int>> reward_index;
float gap = 1.0 / (reward.size() - 1);
float normlized_rank = -0.5;
......@@ -31,6 +31,7 @@ void compute_centered_ranks(std::vector<float> &reward) {
reward[id] = normlized_rank;
normlized_rank += gap;
}
return true;
}
}//namespace
# Build configuration for the DeepES unit-test binary (unit_test_main).
# CMAKE_CXX_STANDARD is only honored by CMake >= 3.1; requiring 2.6 would let
# older CMake silently ignore the C++11 setting, so require 3.1 explicitly.
cmake_minimum_required (VERSION 3.1)
project (DeepES)

set(TARGET unit_test_main)

set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

find_package(GTest REQUIRED)

# OpenMP is optional; the core sources use `#pragma omp` for parallel sampling.
find_package(OpenMP)
if (OPENMP_FOUND)
    set (CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OpenMP_C_FLAGS}")
    set (CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OpenMP_CXX_FLAGS}")
    set (CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${OpenMP_EXE_LINKER_FLAGS}")
endif()

# Library sources under test plus the test drivers themselves.
file(GLOB core_src "../src/*.cc" "../src/*.cpp" "../benchmark/*.cc")
file(GLOB test_src "../test/src/*.cc")

include_directories("../include")
include_directories("../benchmark")
include_directories("../test/include")
# Headers reported by FindGTest; matters when gtest is not under /usr/include.
include_directories(${GTEST_INCLUDE_DIRS})

# NOTE(review): the original also listed ${lib_src}, which is never defined
# anywhere and expanded to nothing; it has been dropped.
add_executable(${TARGET} "unit_test.cc" ${core_src} ${test_src}) # ${demo_src}
target_link_libraries(${TARGET} gflags protobuf pthread glog ${GTEST_LIBRARIES}) # "${TORCH_LIBRARIES}"
#!/bin/bash
# Build and run the DeepES unit tests.
# Expected to be invoked from the deepes project root, e.g. `sh test/run_test.sh`.

# Abort on the first failing command; without this, a failed protoc/cmake/make
# would still fall through and the script could report a bogus result.
set -e

export LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH

#----------------protobuf-------------#
# Regenerate the protobuf sources from the .proto definition.
cp ./src/proto/deepes.proto ./
protoc deepes.proto --cpp_out ./
mv deepes.pb.h ./include
mv deepes.pb.cc ./src

#----------------build---------------#
rm -rf build
mkdir build
cd build
cmake ../test # -DWITH_TORCH=ON
make -j10

# Run the gtest binary; with `set -e` a test failure fails the whole script.
./unit_test_main
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "gtest/gtest.h"
#include <vector>
#include "optimizer_factory.h"
#include <memory>
namespace DeepES {
TEST(SGDOptimizersTest, Method_update) {
std::shared_ptr<DeepESConfig> config = std::make_shared<DeepESConfig>();
auto optimizer_config = config->mutable_optimizer();
optimizer_config->set_base_lr(1.0);
optimizer_config->set_type("sgd");
std::shared_ptr<Optimizer> optimizer = create_optimizer(config->optimizer());
float sgd_wei[10] = { 0.0 , 0.0 , 0.04216444, 0.0511456 , 0.04231584, 0.01089015, 0.06569759, 0.00127421,-0.00092832, 0.01128081};
float sgd_grad[10] = {-0.11992419,-0.0 , 0.07681337,-0.06616384, 0.00249889, 0.01158612,-0.3067452 , 0.36048946,-0.15820622,-0.20014143};
float sgd_new[10] = { 0.01199242, 0.0 , 0.0344831 , 0.05776198, 0.04206595, 0.00973154, 0.09637211,-0.03477474, 0.014892306, 0.03129495};
EXPECT_TRUE(optimizer->update(sgd_wei, sgd_grad, 10, "test"));
for (int i = 0; i < 10; ++i) {
EXPECT_FLOAT_EQ(sgd_new[i], sgd_wei[i]) << " i: " << i ;
}
EXPECT_TRUE(optimizer->update(sgd_wei, sgd_grad, 10, "test"));
EXPECT_FALSE(optimizer->update(sgd_wei, sgd_grad, 9, "test"));
}
TEST(AdamOptimizersTest, Method_update) {
std::shared_ptr<DeepESConfig> config = std::make_shared<DeepESConfig>();
auto optimizer_config = config->mutable_optimizer();
optimizer_config->set_base_lr(1.0);
optimizer_config->set_type("adam");
std::shared_ptr<Optimizer> optimizer = create_optimizer(config->optimizer());
float adam_wei[10] = { 0.0 , 0.0 , 0.04216444, 0.0511456 , 0.04231584, 0.01089015, 0.06569759, 0.00127421,-0.00092832, 0.01128081};
float adam_grad[10] = {-0.11992419,-0.0 , 0.07681337,-0.06616384, 0.00249889, 0.01158612,-0.3067452 , 0.36048946,-0.15820622,-0.20014143};
float adam_new[10] = { 0.99999736, 0. ,-0.95783144, 1.05114082,-0.95755763,-0.98908256, 1.06569656,-0.99872491, 0.99906968, 1.01127923};
EXPECT_TRUE(optimizer->update(adam_wei, adam_grad, 10, "test"));
for (int i = 0; i < 10; ++i) {
EXPECT_FLOAT_EQ(adam_new[i], adam_wei[i]) << " i: " << i ;
}
EXPECT_TRUE(optimizer->update(adam_wei, adam_grad, 10, "test"));
EXPECT_FALSE(optimizer->update(adam_wei, adam_grad, 9, "test"));
}
}
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "gtest/gtest.h"
#include <vector>
#include "utils.h"
namespace DeepES {

// Exercises compute_centered_ranks on a strictly decreasing reward list and
// checks that the call reports success.
TEST(UtilsTest, Method_compute_centered_ranks) {
    std::vector<float> rewards = {9.0f, 8.0f, 7.0f, 6.0f, 5.0f};
    EXPECT_TRUE(compute_centered_ranks(rewards));
}

}
// Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "gtest/gtest.h"
// Entry point for the DeepES unit-test binary: hands the command line to
// GoogleTest (which consumes its own --gtest_* flags), then runs every TEST()
// linked into this executable and returns nonzero if any of them failed.
int main(int argc, char **argv) {
::testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册