提交 a765c7c3 编写于 作者: Y Yu Yang 提交者: GitHub

Merge pull request #1013 from reyoung/feature/add_sum_cost_in_args

Add some functions to PaddleAPI.h
...@@ -137,6 +137,10 @@ void Arguments::setSlotSequenceDim(size_t idx, IVector* vec) throw(RangeError) { ...@@ -137,6 +137,10 @@ void Arguments::setSlotSequenceDim(size_t idx, IVector* vec) throw(RangeError) {
a.cpuSequenceDims = m->cast<paddle::IVector>(vec->getSharedPtr()); a.cpuSequenceDims = m->cast<paddle::IVector>(vec->getSharedPtr());
} }
/// Total cost across all output arguments held by this wrapper.
/// Thin delegation to the static helper paddle::Argument::sumCosts;
/// semantics of the sum are defined there.
float Arguments::sumCosts() const {
  const auto& outputArgs = m->outputs;
  return paddle::Argument::sumCosts(outputArgs);
}
int64_t Arguments::getBatchSize(size_t idx) const throw(RangeError) { int64_t Arguments::getBatchSize(size_t idx) const throw(RangeError) {
auto& a = m->getArg(idx); auto& a = m->getArg(idx);
return a.getBatchSize(); return a.getBatchSize();
......
...@@ -450,6 +450,8 @@ public: ...@@ -450,6 +450,8 @@ public:
IVector* vec) throw(RangeError); IVector* vec) throw(RangeError);
void setSlotSequenceDim(size_t idx, IVector* vec) throw(RangeError); void setSlotSequenceDim(size_t idx, IVector* vec) throw(RangeError);
float sumCosts() const;
private: private:
static Arguments* createByPaddleArgumentVector(void* ptr); static Arguments* createByPaddleArgumentVector(void* ptr);
void* getInternalArgumentsPtr() const; void* getInternalArgumentsPtr() const;
...@@ -546,6 +548,10 @@ public: ...@@ -546,6 +548,10 @@ public:
ParameterConfig* getConfig(); ParameterConfig* getConfig();
void setValueUpdated(); void setValueUpdated();
bool save(const std::string& filename) const;
bool load(const std::string& filename) const;
size_t getSize() const; size_t getSize() const;
private: private:
......
...@@ -57,4 +57,12 @@ size_t Parameter::getID() const { return m->getPtr()->getID(); } ...@@ -57,4 +57,12 @@ size_t Parameter::getID() const { return m->getPtr()->getID(); }
void Parameter::setValueUpdated() { m->getPtr()->setValueUpdated(); } void Parameter::setValueUpdated() { m->getPtr()->setValueUpdated(); }
/// Persist this parameter to `filename` by delegating to the wrapped
/// paddle::Parameter.  Returns whatever the underlying save() reports —
/// presumably true on success (confirm against paddle::Parameter docs).
bool Parameter::save(const std::string& filename) const {
  auto underlying = m->getPtr();
  return underlying->save(filename);
}
/// Restore this parameter from `filename` by delegating to the wrapped
/// paddle::Parameter.  NOTE(review): declared `const` although load()
/// presumably mutates the underlying parameter value — the const applies
/// only to this wrapper object, not the pointee.
bool Parameter::load(const std::string& filename) const {
  auto underlying = m->getPtr();
  return underlying->load(filename);
}
size_t Parameter::getSize() const { return m->getPtr()->getSize(); } size_t Parameter::getSize() const { return m->getPtr()->getSize(); }
...@@ -22,6 +22,8 @@ class TestArguments(unittest.TestCase): ...@@ -22,6 +22,8 @@ class TestArguments(unittest.TestCase):
args = swig_paddle.Arguments.createArguments(1) args = swig_paddle.Arguments.createArguments(1)
args.setSlotValue(0, m) args.setSlotValue(0, m)
self.assertAlmostEqual(27.0, args.sumCosts())
mat = args.getSlotValue(0) mat = args.getSlotValue(0)
assert isinstance(mat, swig_paddle.Matrix) assert isinstance(mat, swig_paddle.Matrix)
np_mat = mat.toNumpyMatInplace() np_mat = mat.toNumpyMatInplace()
......
...@@ -45,6 +45,7 @@ class TestGradientMachine(unittest.TestCase): ...@@ -45,6 +45,7 @@ class TestGradientMachine(unittest.TestCase):
assert isinstance(val, swig_paddle.Vector) assert isinstance(val, swig_paddle.Vector)
arr = numpy.full((len(val), ), 0.1, dtype="float32") arr = numpy.full((len(val), ), 0.1, dtype="float32")
val.copyFromNumpyArray(arr) val.copyFromNumpyArray(arr)
self.assertTrue(param.save(param.getName()))
param_config = param.getConfig().toProto() param_config = param.getConfig().toProto()
assert isinstance(param_config, assert isinstance(param_config,
paddle.proto.ParameterConfig_pb2.ParameterConfig) paddle.proto.ParameterConfig_pb2.ParameterConfig)
...@@ -92,6 +93,9 @@ class TestGradientMachine(unittest.TestCase): ...@@ -92,6 +93,9 @@ class TestGradientMachine(unittest.TestCase):
self.assertTrue(self.isCalled) self.assertTrue(self.isCalled)
for param in machine.getParameters():
self.assertTrue(param.load(param.getName()))
def test_train_one_pass(self): def test_train_one_pass(self):
conf_file_path = './testTrainConfig.py' conf_file_path = './testTrainConfig.py'
trainer_config = swig_paddle.TrainerConfig.createFromTrainerConfigFile( trainer_config = swig_paddle.TrainerConfig.createFromTrainerConfigFile(
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册