diff --git a/paddle/gserver/layers/CRFLayer.cpp b/paddle/gserver/layers/CRFLayer.cpp
index 16cc7384884f7f12f5d022afb7bdb6de6c7fc2c9..0b544420097e9150f8489731b6379dea633e992c 100644
--- a/paddle/gserver/layers/CRFLayer.cpp
+++ b/paddle/gserver/layers/CRFLayer.cpp
@@ -42,8 +42,7 @@ bool CRFLayer::init(const LayerMap& layerMap,
   CHECK_EQ(parameters_[0]->getSize(), numClasses_ * (numClasses_ + 2));
   parameter_ = parameters_[0];
-  weight_.reset(
-      new Weight(numClasses_ + 2, numClasses_, parameter_));
+  weight_.reset(new Weight(numClasses_ + 2, numClasses_, parameter_));
 
   // We don't need sequenceStartPositions because each sample of output_ is
   // for the cost of one sequence.
@@ -95,16 +94,18 @@ void CRFLayer::backward(const UpdateCallback& callback) {
   for (int i = 0; i < numSequences; ++i) {
     crfs_[i].backward(output.value->getData() + numClasses_ * starts[i],
                       label.ids->getData() + starts[i],
-                      starts[i + 1] - starts[i], needWGrad);
-    real instanceWeight = weightLayer_ ?
-        getInputValue(*weightLayer_)->getElement(i, 0) : real(1.0f);
+                      starts[i + 1] - starts[i],
+                      needWGrad);
+    real instanceWeight = weightLayer_
+                              ? getInputValue(*weightLayer_)->getElement(i, 0)
+                              : real(1.0f);
     instanceWeight *= coeff_;
 
     MatrixPtr grad = output.grad->subRowMatrix(starts[i], starts[i + 1]);
     grad->add(*crfs_[i].getXGrad(), real(1.0f), instanceWeight);
     if (needWGrad) {
-      weight_->getWGrad()->add(*crfs_[i].getWGrad(), real(1.0f),
-                               instanceWeight);
+      weight_->getWGrad()->add(
+          *crfs_[i].getWGrad(), real(1.0f), instanceWeight);
     }
   }
 
diff --git a/paddle/gserver/layers/CRFLayer.h b/paddle/gserver/layers/CRFLayer.h
index 000c48e2d55d1352eb38d648b5c55c00fb4c3050..3c7192913fa76d79ee7b194dcd600f06d5e10a74
--- a/paddle/gserver/layers/CRFLayer.h
+++ b/paddle/gserver/layers/CRFLayer.h
@@ -37,9 +37,9 @@ protected:
   size_t numClasses_;
   ParameterPtr parameter_;
   std::vector<LinearChainCRF> crfs_;
-  LayerPtr weightLayer_;  // weight for each sequence
+  LayerPtr weightLayer_;            // weight for each sequence
   std::unique_ptr<Weight> weight_;  // parameters
-  real coeff_;  // weight for the layer
+  real coeff_;                      // weight for the layer
 };
 
 }  // namespace paddle
diff --git a/paddle/gserver/tests/test_CRFLayerGrad.cpp b/paddle/gserver/tests/test_CRFLayerGrad.cpp
index ad5149306caa8cfc7c60f09e20c54a43e52904bc..6985977aed920aaa4b94d4bed2114f54caec5584
--- a/paddle/gserver/tests/test_CRFLayerGrad.cpp
+++ b/paddle/gserver/tests/test_CRFLayerGrad.cpp
@@ -21,13 +21,10 @@ limitations under the License. */
 #include "TestUtil.h"
 #include "LayerGradUtil.h"
 
-using namespace paddle; // NOLINT
+using namespace paddle;  // NOLINT
 
-P_DECLARE_bool(use_gpu);
-P_DECLARE_int32(gpu_id);
-P_DECLARE_double(checkgrad_eps);
-P_DECLARE_bool(thread_local_rand_use_global_seed);
-P_DECLARE_bool(prev_batch_state);
+DECLARE_int32(gpu_id);
+DECLARE_bool(thread_local_rand_use_global_seed);
 
 static inline bool getNextSequence(std::vector<int>& seq, int numClasses) {
   for (auto& v : seq) {
@@ -96,8 +93,8 @@ TEST(CRFLayer, cost) {
     real diff = fabs(trueCost - cost);
     diff /= fabs(cost) < fabs(trueCost) ? fabs(cost) : fabs(trueCost);
-    VLOG(1) << "cost=" << cost << " trueCost=" << trueCost
-            << " diff=" << diff << std::endl;
+    VLOG(1) << "cost=" << cost << " trueCost=" << trueCost << " diff=" << diff
+            << std::endl;
     if (typeid(real) == typeid(double)) {  // NOLINT
       EXPECT_LE(diff, 1e-10);
     } else {
@@ -107,9 +104,7 @@
   }
 }
 
-inline real epsilon() {
-  return typeid(real) == typeid(double) ? 1e-10 : 0.06;
-}
+inline real epsilon() { return typeid(real) == typeid(double) ? 1e-10 : 0.06; }
 
 TestConfig initTestConfig(size_t numClasses, bool withWeight) {
   TestConfig config;
@@ -117,16 +112,17 @@
   config.layerConfig.set_size(numClasses);
   config.biasSize = 0;
 
-  config.inputDefs.push_back({INPUT_SEQUENCE_DATA, "layer_0",
-                              numClasses, numClasses * (numClasses + 2)});
+  config.inputDefs.push_back({INPUT_SEQUENCE_DATA,
+                              "layer_0",
+                              numClasses,
+                              numClasses * (numClasses + 2)});
   config.layerConfig.add_inputs();
-  config.inputDefs.push_back({INPUT_SEQUENCE_LABEL, "layer_label",
-                              numClasses, 0});
+  config.inputDefs.push_back(
+      {INPUT_SEQUENCE_LABEL, "layer_label", numClasses, 0});
   config.layerConfig.add_inputs();
 
   if (withWeight) {
-    config.inputDefs.push_back({INPUT_DENSE_DIM_DATA, "layer_weight",
-                                1, 0});
+    config.inputDefs.push_back({INPUT_DENSE_DIM_DATA, "layer_weight", 1, 0});
     config.layerConfig.add_inputs();
   }