diff --git a/paddle/api/GradientMachine.cpp b/paddle/api/GradientMachine.cpp index a64e70a6bd563a20f1da3549aeaa76454dfe3219..538ca2999f8f05afc45ac2d2f526133c8024f066 100644 --- a/paddle/api/GradientMachine.cpp +++ b/paddle/api/GradientMachine.cpp @@ -148,7 +148,8 @@ Arguments* GradientMachine::getLayerOutput(const std::string& layerName) const throw(UnsupportError) { auto nn = m->machine; if (nn) { - return Arguments::createByPaddleArgument(&nn->getLayerOutput(layerName)); + auto arg = nn->getLayerOutput(layerName); + return Arguments::createByPaddleArgument(&arg); } else { throw UnsupportError(); } diff --git a/paddle/api/Trainer.cpp b/paddle/api/Trainer.cpp index 29cf2aa4501de35d970c4d416d62343d39bb234a..84e4ca054abb0100a02c8a40e31c49c17684ef40 100644 --- a/paddle/api/Trainer.cpp +++ b/paddle/api/Trainer.cpp @@ -134,7 +134,8 @@ void Trainer::finishTestPeriod() { m->finishTestPeriod(); } Arguments* Trainer::getLayerOutput(const std::string& layerName) const { auto nn = this->m->getGradientMachine(); CHECK(nn) << "trainerInternal_.getGradientMachine() is not NeuralNetwork"; - return Arguments::createByPaddleArgument(&nn->getLayerOutput(layerName)); + auto arg = nn->getLayerOutput(layerName); + return Arguments::createByPaddleArgument(&arg); } void Trainer::forwardOneBatch(size_t batchSize) { diff --git a/paddle/gserver/gradientmachines/GradientMachine.h b/paddle/gserver/gradientmachines/GradientMachine.h index ae39783c6b7ea64a5306d08f665ed7e53f2b58ca..bc2f2f8563526aa045ea89f15152ee2d639b5774 100644 --- a/paddle/gserver/gradientmachines/GradientMachine.h +++ b/paddle/gserver/gradientmachines/GradientMachine.h @@ -134,7 +134,7 @@ public: backward(callback); } - virtual const Argument& getLayerOutput(const std::string& layerName) { + virtual Argument getLayerOutput(const std::string& layerName) { return *((Argument*)nullptr); } diff --git a/paddle/gserver/gradientmachines/MultiGradientMachine.cpp b/paddle/gserver/gradientmachines/MultiGradientMachine.cpp index 
6b11b0155e990d6eb740f744ffa3f3a086e3e1ed..123273f916f5d33e2543d9f5f28573c3b5761e28 100644 --- a/paddle/gserver/gradientmachines/MultiGradientMachine.cpp +++ b/paddle/gserver/gradientmachines/MultiGradientMachine.cpp @@ -282,8 +282,7 @@ void MultiGradientMachine::forwardBackward(const std::vector& inArgs, backwardImp(callback); } -const Argument& MultiGradientMachine::getLayerOutput( - const std::string& layerName) { +Argument MultiGradientMachine::getLayerOutput(const std::string& layerName) { std::vector args; args.reserve(threads_.size()); diff --git a/paddle/gserver/gradientmachines/MultiGradientMachine.h b/paddle/gserver/gradientmachines/MultiGradientMachine.h index 9083230afd69e278b0343be1785edd01df3ad016..838a52b5153af63adbce5788824b9f541f22517c 100644 --- a/paddle/gserver/gradientmachines/MultiGradientMachine.h +++ b/paddle/gserver/gradientmachines/MultiGradientMachine.h @@ -189,7 +189,7 @@ public: PassType passType, const UpdateCallback& callback); - virtual const Argument& getLayerOutput(const std::string& layerName); + virtual Argument getLayerOutput(const std::string& layerName); virtual void onPassEnd(); diff --git a/paddle/gserver/gradientmachines/NeuralNetwork.cpp b/paddle/gserver/gradientmachines/NeuralNetwork.cpp index d1afde40e1f81eea6b34c1e2c33fe4851d0074d6..2f2aa24aaca33a0c361a81025d6dca564e3672a1 100644 --- a/paddle/gserver/gradientmachines/NeuralNetwork.cpp +++ b/paddle/gserver/gradientmachines/NeuralNetwork.cpp @@ -293,7 +293,7 @@ void NeuralNetwork::backward(const UpdateCallback& callback) { } } -const Argument& NeuralNetwork::getLayerOutput(const std::string& layerName) { +Argument NeuralNetwork::getLayerOutput(const std::string& layerName) { return getLayer(layerName)->getOutput(); } diff --git a/paddle/gserver/gradientmachines/NeuralNetwork.h b/paddle/gserver/gradientmachines/NeuralNetwork.h index b4dc38e31b6fae1dd721b4ec1f5cdd2e5fe50e61..e7b6c438407e7eab6eab1f6ed496f35caa9f2177 100644 --- a/paddle/gserver/gradientmachines/NeuralNetwork.h 
+++ b/paddle/gserver/gradientmachines/NeuralNetwork.h @@ -87,7 +87,7 @@ public: virtual void backward(const UpdateCallback& callback = nullptr); - virtual const Argument& getLayerOutput(const std::string& layerName); + virtual Argument getLayerOutput(const std::string& layerName); const LayerPtr& getLayer(const std::string& layerName) const { auto it = layerMap_.find(layerName); diff --git a/paddle/gserver/layers/CosSimVecMatLayer.cpp b/paddle/gserver/layers/CosSimVecMatLayer.cpp index aabafd473aa1e06a767d48d4c49b7b8662e992e7..0f887d8adfa053e8fe88ac4fa4e2a9ba08ac07b5 100644 --- a/paddle/gserver/layers/CosSimVecMatLayer.cpp +++ b/paddle/gserver/layers/CosSimVecMatLayer.cpp @@ -112,7 +112,7 @@ bool CosSimVecMatLayer::init(const LayerMap& layerMap, void CosSimVecMatLayer::forward(PassType passType) { Layer::forward(passType); - CHECK_EQ(forward_.size(), 1) << "Only one forward function needed"; + CHECK_EQ(forward_.size(), 1UL) << "Only one forward function needed"; MatrixPtr inV0 = getInputValue(0); MatrixPtr inV1 = getInputValue(1); @@ -145,7 +145,7 @@ void CosSimVecMatLayer::forward(PassType passType) { } void CosSimVecMatLayer::backward(const UpdateCallback& callback) { - CHECK_EQ(backward_.size(), 1) << "Only one forward function needed"; + CHECK_EQ(backward_.size(), 1UL) << "Only one backward function needed"; MatrixPtr inV0 = getInputValue(0); MatrixPtr inV1 = getInputValue(1);