Commit af86c3c4 authored by qingqing01, committed by GitHub

Merge pull request #657 from reyoung/feature/protobuf_equal

Using ApproximatelyEquals for trainer config unittest.
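In short: the PR adds a small comparison tool, protobuf_equal, which parses two text-format config files and checks whether they are approximately equal as protobuf messages; when protobuf 3 is available, the trainer-config unit tests use it instead of a plain textual diff, which lets the comparison tolerate small floating-point differences in the dumped configs.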
......
@@ -12,6 +12,17 @@ include(package)
 find_package(SWIG 2.0)
 find_package(CUDA QUIET)
 find_package(Protobuf REQUIRED)
+# Check protobuf library version.
+execute_process(COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --version
+                OUTPUT_VARIABLE PROTOBUF_VERSION)
+string(REPLACE "libprotoc " "" PROTOBUF_VERSION ${PROTOBUF_VERSION})
+set(PROTOBUF_3 OFF)
+if (${PROTOBUF_VERSION} VERSION_GREATER "3.0.0" OR ${PROTOBUF_VERSION} VERSION_EQUAL "3.0.0")
+  set(PROTOBUF_3 ON)
+endif()
 find_package(PythonLibs 2.7 REQUIRED)
 find_package(PythonInterp 2.7 REQUIRED)
 find_package(ZLIB REQUIRED)
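The new check runs protoc --version, strips the "libprotoc " prefix from its output, and turns PROTOBUF_3 on when the version is at least 3.0.0. Defining the flag in the top-level CMakeLists.txt makes it visible to the test CMakeLists.txt further down, which only builds the comparison tool under protobuf 3. The same block appearing with minus markers in the next hunk is, as best one can tell from this diff, an older per-directory copy of the check being removed now that the flag is set globally.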
......
-execute_process(COMMAND ${PROTOBUF_PROTOC_EXECUTABLE} --version
-                OUTPUT_VARIABLE PROTOBUF_VERSION)
-string(REPLACE "libprotoc " "" PROTOBUF_VERSION ${PROTOBUF_VERSION})
-set(PROTOBUF_3 OFF)
-if (${PROTOBUF_VERSION} VERSION_GREATER "3.0.0" OR ${PROTOBUF_VERSION} VERSION_EQUAL "3.0.0")
-  set(PROTOBUF_3 ON)
-endif()
 set(proto_filenames
     DataConfig.proto
     DataFormat.proto
......
@@ -4,7 +4,17 @@ add_test(NAME layers_test
     python ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/layers_test.py
   WORKING_DIRECTORY ${PROJ_ROOT}/python/paddle)
-add_test(NAME test_layerHelpers
-  COMMAND
-  ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh
-)
+if (PROTOBUF_3)
+  add_paddle_exe(protobuf_equal
+    ProtobufEqualMain.cpp)
+  add_test(NAME test_layerHelpers
+    COMMAND
+    ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh
+    ${CMAKE_CURRENT_BINARY_DIR}/protobuf_equal
+  )
+else()
+  add_test(NAME test_layerHelpers
+    COMMAND
+    ${PROJ_ROOT}/python/paddle/trainer_config_helpers/tests/configs/run_tests.sh
+  )
+endif()
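When protobuf 3 is available, the protobuf_equal binary is built and its path is appended to the run_tests.sh invocation; the script (edited below) picks it up as $1 and switches from textual to message-level comparison. Without protobuf 3, the test command is unchanged.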
/* Copyright (c) 2016 Baidu, Inc. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */

#include <google/protobuf/text_format.h>
#include <google/protobuf/util/message_differencer.h>
#include <fstream>
#include <iostream>
#include <memory>  // std::unique_ptr
#include "TrainerConfig.pb.h"

// Reads a text-format protobuf file into `conf`; returns false if the file
// cannot be opened or does not parse.
bool loadPb(google::protobuf::Message* conf, const std::string& filename) {
  std::ifstream fin;
  fin.open(filename.c_str());
  if (fin.is_open()) {
    std::string str((std::istreambuf_iterator<char>(fin)),
                    std::istreambuf_iterator<char>());
    bool ok = google::protobuf::TextFormat::ParseFromString(str, conf);
    fin.close();
    return ok;
  } else {
    return false;
  }
}

int main(int argc, char** argv) {
  std::unique_ptr<google::protobuf::Message> config1;
  std::unique_ptr<google::protobuf::Message> config2;
  // Two file arguments: compare ModelConfig messages. A third argument
  // (run_tests.sh passes --whole) switches to whole TrainerConfig messages.
  if (argc == 3) {
    config1.reset(new paddle::ModelConfig());
    config2.reset(new paddle::ModelConfig());
  } else if (argc == 4) {
    config1.reset(new paddle::TrainerConfig());
    config2.reset(new paddle::TrainerConfig());
  }
  if (!config1 || !config2) {
    return 1;  // unexpected argument count
  } else if (!loadPb(config1.get(), argv[1])) {
    return 2;  // first file unreadable or unparsable
  } else if (!loadPb(config2.get(), argv[2])) {
    return 3;  // second file unreadable or unparsable
  } else {
    // Float fields are compared within a tolerance, not bit-exactly.
    if (google::protobuf::util::MessageDifferencer::ApproximatelyEquals(
            *config1, *config2)) {
      return 0;
    } else {
      return 4;  // messages differ
    }
  }
}
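ApproximatelyEquals requires the two messages to match field by field, but compares float and double fields within a tolerance rather than bit-exactly, which is what makes the tool robust to rounding in dumped configs. For reference, a sketch of the equivalent instance-based MessageDifferencer API, which can also report which fields differ (approxEqualWithReport is an illustrative helper, not part of this PR):

#include <string>
#include <google/protobuf/message.h>
#include <google/protobuf/util/message_differencer.h>

using google::protobuf::util::MessageDifferencer;

// Same result as ApproximatelyEquals, but records a human-readable list of
// differing fields in *report when the messages do not match.
bool approxEqualWithReport(const google::protobuf::Message& a,
                           const google::protobuf::Message& b,
                           std::string* report) {
  MessageDifferencer diff;
  diff.set_float_comparison(MessageDifferencer::APPROXIMATE);
  diff.ReportDifferencesToString(report);
  return diff.Compare(a, b);
}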
#!/bin/bash
export configs=(test_fc layer_activations projections test_print_layer
test_sequence_pooling test_lstmemory_layer test_grumemory_layer
last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
test_rnn_group shared_fc shared_lstm test_cost_layers_with_weight
test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops)
export whole_configs=(test_split_datasource)
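This config list previously lived inline in generate_protostr.sh; the new file (sourced as file_list.sh by the scripts below) holds the single copy that both generate_protostr.sh and run_tests.sh now share.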
......
@@ -5,24 +5,16 @@ cd `dirname $0`
 export PYTHONPATH=$PWD/../../../../
 protostr=$PWD/protostr
-configs=(test_fc layer_activations projections test_print_layer
-test_sequence_pooling test_lstmemory_layer test_grumemory_layer
-last_first_seq test_expand_layer test_ntm_layers test_hsigmoid
-img_layers img_trans_layers util_layers simple_rnn_layers unused_layers test_cost_layers
-test_rnn_group shared_fc shared_lstm test_cost_layers_with_weight
-test_spp_layer test_bilinear_interp test_maxout test_bi_grumemory math_ops)
-whole_configs=(test_split_datasource)
+. file_list.sh
 for conf in ${configs[*]}
 do
     echo "Generating " $conf
-    python -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unitest
+    python -m paddle.utils.dump_config $conf.py > $protostr/$conf.protostr.unittest
 done
 for conf in ${whole_configs[*]}
 do
     echo "Generating " $conf
-    python -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unitest
+    python -m paddle.utils.dump_config $conf.py "" --whole > $protostr/$conf.protostr.unittest
 done
......
@@ -9,9 +9,27 @@ files=`ls $protostr | grep -v "unitest"`
 ./generate_protostr.sh
-for file in $files
-do
-    base_protostr=$protostr/$file
-    new_protostr=$protostr/$file.unitest
-    diff $base_protostr $new_protostr -u
-done
+. ./file_list.sh
+if [ -z $1 ]; then
+    for file in $files
+    do
+        base_protostr=$protostr/$file
+        new_protostr=$protostr/$file.unittest
+        diff $base_protostr $new_protostr -u
+    done
+else
+    for file in ${configs[*]}
+    do
+        if ! $1 $protostr/$file.protostr $protostr/$file.protostr.unittest; then
+            diff $protostr/$file.protostr $protostr/$file.protostr.unittest -u
+        fi
+    done
+    for file in ${whole_configs[*]}
+    do
+        if ! $1 $protostr/$file.protostr $protostr/$file.protostr.unittest --whole; then
+            diff $protostr/$file.protostr $protostr/$file.protostr.unittest -u
+        fi
+    done
+fi
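When given an argument, run_tests.sh uses $1 (the protobuf_equal path supplied by CMake) for the comparison and only falls back to a textual diff when the binary exits non-zero: per ProtobufEqualMain.cpp above, 1 means a bad argument count, 2 or 3 a file that failed to parse, and 4 messages that genuinely differ. For whole-trainer configs the extra --whole argument bumps argc to 4, which makes the tool parse TrainerConfig instead of ModelConfig.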