Commit 1733a777 authored by Yu Yang

Using ApproximatelyEquals for trainer config unittest.

* Make float equality comparison work correctly.
  * Comparing 0.01 with 0.009999999999 will return true.

Parent f8ec510a
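For background, protobuf's MessageDifferencer::Equals compares float and double fields bit-for-bit, while ApproximatelyEquals compares them with a small relative tolerance, so two dumped configs that differ only by floating-point round-off are treated as identical. Below is a minimal sketch (not part of this commit) illustrating the difference; it uses the well-known google.protobuf.FloatValue wrapper (shipped with protobuf 3) as a stand-in for a config message that contains a float field.

// Sketch: exact vs. approximate protobuf message comparison.
#include <google/protobuf/util/message_differencer.h>
#include <google/protobuf/wrappers.pb.h>
#include <iostream>

int main() {
  google::protobuf::FloatValue a;
  google::protobuf::FloatValue b;
  a.set_value(1.0f);
  b.set_value(1.000001f);  // differs from 1.0f by only a few ULPs

  using google::protobuf::util::MessageDifferencer;
  // Exact comparison: the two values have different bit patterns, so this prints 0.
  std::cout << MessageDifferencer::Equals(a, b) << "\n";
  // Approximate comparison: the relative error is within the default
  // tolerance, so this prints 1.
  std::cout << MessageDifferencer::ApproximatelyEquals(a, b) << "\n";
  return 0;
}

With approximate comparison, tiny round-off differences such as 0.01 vs. 0.009999999999 in the dumped configs no longer cause spurious test failures.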
CMakeLists.txt:

@@ -4,7 +4,11 @@ add_test(NAME layers_test
   python ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/layers_test.py
   WORKING_DIRECTORY ${PROJ_ROOT}/python/paddle)

+add_paddle_exe(protobuf_equal
+  ProtobufEqualMain.cpp)
+
 add_test(NAME test_layerHelpers
   COMMAND
     ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh
+    ${CMAKE_CURRENT_BINARY_DIR}/protobuf_equal
 )
ProtobufEqualMain.cpp (new file):

/* Copyright (c) 2016 Baidu, Inc. All Rights Reserve.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <google/protobuf/text_format.h>
#include <google/protobuf/util/message_differencer.h>
#include <fstream>
#include <iostream>
#include <memory>

#include "TrainerConfig.pb.h"

// Read the whole file and parse it as a text-format protobuf message.
bool loadPb(google::protobuf::Message* conf, const std::string& filename) {
  std::ifstream fin;
  fin.open(filename.c_str());
  if (fin.is_open()) {
    std::string str((std::istreambuf_iterator<char>(fin)),
                    std::istreambuf_iterator<char>());
    bool ok = google::protobuf::TextFormat::ParseFromString(str, conf);
    fin.close();
    return ok;
  } else {
    return false;
  }
}

// Usage: protobuf_equal <file1> <file2> [--whole]
// With two positional arguments the files are parsed as ModelConfig; a third
// argument (run_tests.sh passes --whole) switches to the full TrainerConfig.
// Only the argument count is inspected.
// Exit codes: 0 approximately equal, 1 bad argument count,
// 2/3 failed to parse the first/second file, 4 messages differ.
int main(int argc, char** argv) {
  std::unique_ptr<google::protobuf::Message> config1;
  std::unique_ptr<google::protobuf::Message> config2;
  if (argc == 3) {
    config1.reset(new paddle::ModelConfig());
    config2.reset(new paddle::ModelConfig());
  } else if (argc == 4) {
    config1.reset(new paddle::TrainerConfig());
    config2.reset(new paddle::TrainerConfig());
  }

  if (!config1 || !config2) {
    return 1;
  } else if (!loadPb(config1.get(), argv[1])) {
    return 2;
  } else if (!loadPb(config2.get(), argv[2])) {
    return 3;
  } else {
    // ApproximatelyEquals tolerates tiny floating-point differences
    // (e.g. 0.01 vs. 0.009999999999) instead of requiring exact equality.
    if (google::protobuf::util::MessageDifferencer::ApproximatelyEquals(
            *config1, *config2)) {
      return 0;
    } else {
      return 4;
    }
  }
}
file_list.sh (new file):

#!/bin/bash
# Config scripts whose dumped protostr output is checked by run_tests.sh.
export configs=(test_fc layer_activations projections test_print_layer
test_sequence_pooling test_lstmemory_layer test_grumemory_layer
last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
test_rnn_group shared_fc shared_lstm test_cost_layers_with_weight
test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops)

# Config scripts that are dumped with --whole, i.e. as a full TrainerConfig.
export whole_configs=(test_split_datasource)
generate_protostr.sh:

@@ -5,24 +5,16 @@ cd `dirname $0`
 export PYTHONPATH=$PWD/../../../../

 protostr=$PWD/protostr

-configs=(test_fc layer_activations projections test_print_layer
-test_sequence_pooling test_lstmemory_layer test_grumemory_layer
-last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
-img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
-test_rnn_group shared_fc shared_lstm test_cost_layers_with_weight
-test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops)
-whole_configs=(test_split_datasource)
+. file_list.sh

 for conf in ${configs[*]}
 do
     echo "Generating " $conf
-    python -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unitest
+    python -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unittest
 done

 for conf in ${whole_configs[*]}
 do
     echo "Generating " $conf
-    python -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unitest
+    python -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unittest
 done
run_tests.sh:

@@ -9,9 +9,27 @@ files=`ls $protostr | grep -v "unitest"`
 ./generate_protostr.sh

+. ./file_list.sh
+
+if [ -z $1 ]; then
 for file in $files
 do
     base_protostr=$protostr/$file
-    new_protostr=$protostr/$file.unitest
+    new_protostr=$protostr/$file.unittest
     diff $base_protostr $new_protostr -u
 done
+else
+    for file in ${configs[*]}
+    do
+        if ! $1 $protostr/$file.protostr $protostr/$file.protostr.unittest; then
+            diff $protostr/$file.protostr $protostr/$file.protostr.unittest -u
+        fi
+    done
+
+    for file in ${whole_configs[*]}
+    do
+        if ! $1 $protostr/$file.protostr $protostr/$file.protostr.unittest --whole; then
+            diff $protostr/$file.protostr $protostr/$file.protostr.unittest -u
+        fi
+    done
+fi