Commit 70b78b74 authored by wangyang59

fix a bug in BatchNormBaseLayer.cpp and add a test for it

Parent e316c41a
BatchNormBaseLayer.cpp
@@ -68,10 +68,10 @@ void BatchNormBaseLayer::calFeatureMapSize() {
   } else {
     imageH_ = inputLayers_[0]->getOutput().getFrameHeight();
     imageW_ = inputLayers_[0]->getOutput().getFrameWidth();
+    getOutput().setFrameHeight(imageH_);
+    getOutput().setFrameWidth(imageW_);
   }
   imgPixels_ = imageH_ * imageW_;
-  getOutput().setFrameHeight(imageH_);
-  getOutput().setFrameWidth(imageW_);
 }
 
 }  // namespace paddle
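For context, the function after this change reads roughly as follows. This is a sketch reconstructed from the hunk above; the lines ahead of the else branch (reading the image size from the layer's ImageConfig) are assumed from the surrounding code and may differ in detail. The effect of the change is that the output frame height/width are now only set when the input layer actually carries a frame size; when batch norm falls back to the configured image size, the output frame size is left untouched.

// Sketch of BatchNormBaseLayer::calFeatureMapSize() after the change.
// The code before the else branch is assumed from context.
void BatchNormBaseLayer::calFeatureMapSize() {
  const ImageConfig& conf = config_.inputs(0).image_conf();  // assumed
  if (inputLayers_[0]->getOutput().getFrameHeight() == 0 &&
      inputLayers_[0]->getOutput().getFrameWidth() == 0) {
    // The input carries no frame size: fall back to the configured image
    // size and leave the output frame size unset, so that following layers
    // can keep using their own configured image size.
    imageH_ = conf.img_size();
    imageW_ = conf.img_size();
  } else {
    // The input provides an explicit frame size: use it and propagate it
    // to the output (this is what the two added lines do).
    imageH_ = inputLayers_[0]->getOutput().getFrameHeight();
    imageW_ = inputLayers_[0]->getOutput().getFrameWidth();
    getOutput().setFrameHeight(imageH_);
    getOutput().setFrameWidth(imageW_);
  }
  imgPixels_ = imageH_ * imageW_;
}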
CMakeLists.txt
@@ -39,9 +39,17 @@ add_unittest_without_exec(test_ConvUnify
     test_ConvUnify.cpp
     LayerGradUtil.cpp
     TestUtil.cpp)
 add_test(NAME test_ConvUnify
     COMMAND test_ConvUnify)
 
+################# test_BatchNorm #######################
+add_unittest_without_exec(test_BatchNorm
+    test_BatchNorm.cpp
+    LayerGradUtil.cpp
+    TestUtil.cpp)
+add_test(NAME test_BatchNorm
+    COMMAND test_BatchNorm)
+
 ################## test_Evaluator #######################
 add_unittest(test_Evaluator
     test_Evaluator.cpp
......

test_BatchNorm.cpp (new file)
/* Copyright (c) 2016 Baidu, Inc. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <gtest/gtest.h>
#include <vector>
#include <string>
#include "paddle/gserver/layers/DataLayer.h"
#include "ModelConfig.pb.h"
#include "paddle/trainer/Trainer.h"
#include "paddle/utils/GlobalConstants.h"
#include "paddle/gserver/layers/ExpandConvTransLayer.h"
#include "TestUtil.h"
#include "LayerGradUtil.h"
using namespace paddle; // NOLINT
using namespace std; // NOLINT
P_DECLARE_bool(use_gpu);
P_DECLARE_int32(gpu_id);
P_DECLARE_double(checkgrad_eps);
P_DECLARE_bool(thread_local_rand_use_global_seed);
P_DECLARE_bool(prev_batch_state);
// Test that a batch_norm layer fed by a plain data layer (no frame size on
// its input) keeps its output frame size consistent, so that a following
// conv layer uses its own configured image size and produces the expected
// output dimensions.
TEST(Layer, batchNorm) {
  FLAGS_use_gpu = false;
  TestConfig configBN;
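  // 6272 = 128 channels * 7 * 7 pixels: the flat batch_norm output is later
  // reinterpreted by the conv layer below as a 128-channel 7x7 image.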
  const int CHANNELS = 6272;
  const int IMG_SIZE = 1;
  configBN.layerConfig.set_type("batch_norm");
  configBN.layerConfig.set_name("bn");
  configBN.layerConfig.set_size(CHANNELS * IMG_SIZE * IMG_SIZE);
  configBN.layerConfig.set_active_type("relu");
  configBN.biasSize = CHANNELS;
  configBN.inputDefs.push_back({INPUT_DATA, "layer_0",
                                /* dim= */ IMG_SIZE * IMG_SIZE * CHANNELS,
                                /* paraSize= */ CHANNELS});
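  // The second and third inputs hold the running mean and variance; they are
  // marked static so they are not updated as regular parameters.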
configBN.inputDefs.push_back({INPUT_DATA, "layer_1_running_mean",
1, CHANNELS});
configBN.inputDefs.back().isStatic = true;
configBN.inputDefs.push_back({INPUT_DATA, "layer_2_running_var",
1, CHANNELS});
configBN.inputDefs.back().isStatic = true;
LayerInputConfig* input = configBN.layerConfig.add_inputs();
configBN.layerConfig.add_inputs();
configBN.layerConfig.add_inputs();
ImageConfig* img_conf = input->mutable_image_conf();
img_conf->set_channels(CHANNELS);
img_conf->set_img_size(IMG_SIZE);
  // Set up the conv layer that consumes the batch_norm output.
  TestConfig config;
  config.biasSize = 64;
  config.layerConfig.set_type("exconv");
  config.layerConfig.set_num_filters(64);
  config.layerConfig.set_partial_sum(1);
  config.layerConfig.set_shared_biases(true);
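  // Input "bn": 6272 = 128 * 7 * 7 values per sample;
  // paraSize 204800 = 5 * 5 * 128 * 64 filter weights.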
config.inputDefs.push_back({INPUT_DATA, "bn", 6272, 204800});
input = config.layerConfig.add_inputs();
ConvConfig* conv = input->mutable_conv_conf();
conv->set_filter_size(5);
conv->set_filter_size_y(5);
conv->set_channels(128);
conv->set_padding(1);
conv->set_padding_y(1);
conv->set_stride(2);
conv->set_stride_y(2);
conv->set_groups(1);
conv->set_filter_channels(conv->channels() / conv->groups());
conv->set_img_size(7);
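  // With img_size 7, filter 5, padding 1, and stride 2, the conv output is 3x3.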
  conv->set_output_x(3);
  config.layerConfig.set_size(conv->output_x() * conv->output_x() *
                              config.layerConfig.num_filters());
  config.layerConfig.set_name("conv");
  // Initialize the data layers feeding the batch_norm layer (batch size 100).
  std::vector<DataLayerPtr> dataLayers;
  LayerMap layerMap;
  vector<Argument> datas;
  initDataLayer(configBN, &dataLayers, &datas, &layerMap, "batch_norm",
                100, false, false);
  // Initialize the layers under test.
  std::vector<ParameterPtr> parameters;
  LayerPtr bnLayer;
  initTestLayer(configBN, &layerMap, &parameters, &bnLayer);

  std::vector<ParameterPtr> parameters2;
  LayerPtr convLayer;
  initTestLayer(config, &layerMap, &parameters2, &convLayer);
  // Forward both layers; the conv layer consumes the batch_norm output.
  bnLayer->forward(PASS_GC);
  convLayer->forward(PASS_GC);
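  // The conv output should be batchSize x (3 * 3 * 64) = 100 x 576, which
  // requires the batch_norm layer to leave the frame size seen by the conv
  // layer consistent with its configured 7x7 image.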
  CHECK_EQ(convLayer->getOutputValue()->getHeight(), 100);
  CHECK_EQ(convLayer->getOutputValue()->getWidth(), 576);
}
int main(int argc, char** argv) {
  testing::InitGoogleTest(&argc, argv);
  initMain(argc, argv);
  FLAGS_thread_local_rand_use_global_seed = true;
  srand(1);
  return RUN_ALL_TESTS();
}