diff --git a/paddle/gserver/layers/MKLDNNFcLayer.h b/paddle/gserver/layers/MKLDNNFcLayer.h
index dffae27d7ba00b04eb2dbc52738dffd3245b15b4..7954852a23f81d36d5fb0ae6a19768f419886fb1 100644
--- a/paddle/gserver/layers/MKLDNNFcLayer.h
+++ b/paddle/gserver/layers/MKLDNNFcLayer.h
@@ -29,7 +29,10 @@ protected:
   // input layer size, can not be change after init
   size_t iLayerSize_;  // == ic * ih * iw
 
+  // whether the weight has already been initialized
   bool hasInitedWgt_;
+
+  // whether the input layer has image size info (ih>1 && iw>1)
   bool hasSpatial_;
 
   // fc weight and bias
diff --git a/paddle/gserver/tests/MKLDNNTester.cpp b/paddle/gserver/tests/MKLDNNTester.cpp
index d91e4ed60c94522aa7efe35d7c93467f7364d406..99c8c4948c9b05ad15d1217ebb70026bbd48453f 100644
--- a/paddle/gserver/tests/MKLDNNTester.cpp
+++ b/paddle/gserver/tests/MKLDNNTester.cpp
@@ -123,7 +123,8 @@ void MKLDNNTester::checkForward() {
 }
 
 void MKLDNNTester::checkBackwardData() {
-  const bool isBN = dnnLayer_->getType() == "mkldnn_batch_norm";
+  // TODO(TJ): uncomment me when batch norm is ready
+  // const bool isBN = dnnLayer_->getType() == "mkldnn_batch_norm";
   for (size_t i = 0; i < dataLayers_[DNN].size(); ++i) {
     const MatrixPtr& dnnDiff = dataLayers_[DNN][i]->getOutputGrad();
     const MatrixPtr& refDiff = dataLayers_[REF][i]->getOutputGrad();
@@ -134,10 +135,11 @@ void MKLDNNTester::checkBackwardData() {
 
     double delta = compareMatrix(dnnDiff, refDiff);
     EXPECT_LE(fabs(delta), eps_);
 
-    if (isBN) {
-      // the other two inputs in batch norm are for moving mean and var
-      break;
-    }
+    // TODO(TJ): uncomment me when batch norm is ready
+    // if (isBN) {
+    //   // the other two inputs in batch norm are for moving mean and var
+    //   break;
+    // }
   }
 }
diff --git a/paddle/gserver/tests/MKLDNNTester.h b/paddle/gserver/tests/MKLDNNTester.h
index d21f92d426ed00406e9a5f715883490344f3bf0b..522eeaf24b1949abac057a1e59e9977610be23c0 100644
--- a/paddle/gserver/tests/MKLDNNTester.h
+++ b/paddle/gserver/tests/MKLDNNTester.h
@@ -27,9 +27,9 @@ namespace paddle {
  */
 class MKLDNNTester {
   enum {
-    DNN = 0,
-    REF = 1,
-    NUM = 2,
+    DNN = 0,  // the MKLDNN layer under test
+    REF = 1,  // the CPU reference layer
+    NUM = 2,  // total number of layer kinds compared
   };
 
 protected:
@@ -107,7 +107,8 @@ private:
    * Get delta percent
    * if many(>failRate) wrong(abs(dnn-ref)/abs(ref)>thres) points return the
    * max(diff/ref)
-   * else return sum(abs(a-b)) / sum(abs(b)) should smaller than eps
+   * else return sum(abs(a-b)) / sum(abs(b))
+   * The return value should be smaller than eps when passing.
    */
   double getDelta(const real* d1,
                   const real* d2,
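
For reference, the comparison rule that getDelta's doc comment describes can be sketched in plain C++ as below. This is a minimal illustration under stated assumptions, not the actual MKLDNNTester implementation: the function name getDeltaSketch, the float element type (standing in for Paddle's real typedef), and the default failRate / thres values are all hypothetical.

```cpp
#include <algorithm>
#include <cmath>
#include <cstddef>

// Sketch of the documented rule: if many (> failRate) points are "wrong"
// (abs(dnn - ref) / abs(ref) > thres), return max(diff / ref); otherwise
// return sum(abs(a - b)) / sum(abs(b)). Names and defaults are assumptions.
double getDeltaSketch(const float* d1,  // values from the MKLDNN layer
                      const float* d2,  // values from the reference layer
                      size_t len,
                      double failRate = 1e-3,
                      double thres = 0.1) {
  double sumAbsDiff = 0.0;  // sum(abs(a - b))
  double sumAbsRef = 0.0;   // sum(abs(b))
  double maxRatio = 0.0;    // max(diff / ref) over the failing points
  size_t failCnt = 0;       // points with abs(dnn - ref) / abs(ref) > thres
  for (size_t i = 0; i < len; ++i) {
    const double ref = std::fabs(static_cast<double>(d2[i]));
    const double diff = std::fabs(static_cast<double>(d1[i]) - d2[i]);
    sumAbsDiff += diff;
    sumAbsRef += ref;
    const double ratio = diff / (ref + 1e-12);  // guard division by zero
    if (ratio > thres) {
      ++failCnt;
      maxRatio = std::max(maxRatio, ratio);
    }
  }
  // Many points wrong: report the worst relative error so the mismatch is
  // loud. Otherwise report the aggregate relative error, which is expected
  // to stay below eps when the two layers agree.
  if (len > 0 && static_cast<double>(failCnt) / len > failRate) {
    return maxRatio;
  }
  return sumAbsRef == 0.0 ? sumAbsDiff : sumAbsDiff / sumAbsRef;
}
```

The two-branch design lets scattered rounding noise pass through the aggregate metric, while a systematic divergence (many points off by more than thres) surfaces as the worst-case ratio and fails the EXPECT_LE(fabs(delta), eps_) check.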