diff --git a/paddle/gserver/gradientmachines/GradientMachine.cpp b/paddle/gserver/gradientmachines/GradientMachine.cpp
index 36ca05b919b136c162105cf4f1fb7705ae7ca7f3..3eb87d9b85c8207a23046fdb4bda06ba8185e2a3 100644
--- a/paddle/gserver/gradientmachines/GradientMachine.cpp
+++ b/paddle/gserver/gradientmachines/GradientMachine.cpp
@@ -60,55 +60,6 @@ GradientMachine* GradientMachine::create(
   return nullptr;
 }
 
-GradientMachine* GradientMachine::create(const std::string& modelFile,
-                                         DataConfig* dataConfig) {
-  std::ifstream is(modelFile);
-  CHECK(is) << "Fail to open " << modelFile;
-  return create(is, dataConfig);
-}
-
-GradientMachine* GradientMachine::create(std::istream& is,
-                                         DataConfig* dataConfig) {
-  TrainerConfig trainerConfig;
-  GradientMachine* ret = create(is, &trainerConfig);
-  if (dataConfig && trainerConfig.has_data_config()) {
-    *dataConfig = trainerConfig.data_config();
-  }
-  return ret;
-}
-
-GradientMachine* GradientMachine::create(const std::string& modelFile,
-                                         TrainerConfig* trainerConfig) {
-  std::ifstream is(modelFile);
-  CHECK(is) << "Fail to open " << modelFile;
-  return create(is, trainerConfig);
-}
-
-GradientMachine* GradientMachine::create(std::istream& is,
-                                         TrainerConfig* trainerConfig) {
-  TrainerConfig trainerConfigTemp;
-  int64_t size;
-  CHECK(is.read((char*)&size, sizeof(size))) << "Fail to read ";
-  std::string buf;
-  buf.resize(size);
-  CHECK(is.read(&buf[0], size)) << "Fail to read ";
-  CHECK(trainerConfigTemp.ParseFromString(buf)) << "Fail to parse config";
-  std::unique_ptr<GradientMachine> machine(
-      create(trainerConfigTemp.model_config()));
-  std::vector<ParameterPtr>& parameters = machine->getParameters();
-  for (auto& para : parameters) {
-    para->load(is);
-  }
-
-  machine->onLoadParameter();
-
-  if (trainerConfig) {
-    *trainerConfig = trainerConfigTemp;
-  }
-
-  return machine.release();
-}
-
 void GradientMachine::saveParameters(const std::string& dir) const {
   LOG(INFO) << "Saving parameters to " << dir;
 
diff --git a/paddle/gserver/gradientmachines/GradientMachine.h b/paddle/gserver/gradientmachines/GradientMachine.h
index 1e35c7e2b8d185e45f33f6287ad4e32ccad2d5a6..0829968d87c5dc7eeb2d1b70c758ff305d89496f 100644
--- a/paddle/gserver/gradientmachines/GradientMachine.h
+++ b/paddle/gserver/gradientmachines/GradientMachine.h
@@ -89,39 +89,6 @@ public:
           std::vector<ParameterType>{
               PARAMETER_VALUE, PARAMETER_GRADIENT, PARAMETER_MOMENTUM});
 
-  /**
-   * Create a gradient machine from the merged model file.
-   * The merged model file can be generated using tools/merge_model
-   * If dataConfig is not null, it will be filled with the DataConfig
-   * from the TrainerConfig
-   */
-  static GradientMachine* create(const std::string& modelFile,
-                                 DataConfig* dataConfig);
-
-  /**
-   * Create a gradient machine from a stream which contains the merged
-   * model file. The merged model file can be generated using tools/merge_model
-   * If dataConfig is not null, it will be filled with the DataConfig
-   * from the TrainerConfig
-   */
-  static GradientMachine* create(std::istream& is, DataConfig* dataConfig);
-
-  /**
-   * Create a gradient machine from the merged model file.
-   * The merged model file can be generated using tools/merge_model
-   * If trainerConfig is not null, it will be filled with the TrainerConfig
-   */
-  static GradientMachine* create(const std::string& modelFile,
-                                 TrainerConfig* trainerConfig);
-
-  /**
-   * Create a gradient machine from a stream which contains the merged
-   * model file. The merged model file can be generated using tools/merge_model
-   * If trainerConfig is not null, it will be filled with the TrainerConfig
-   */
-  static GradientMachine* create(std::istream& is,
-                                 TrainerConfig* trainerConfig);
-
   virtual ~GradientMachine() {}
 
   /**
diff --git a/paddle/trainer/tests/CMakeLists.txt b/paddle/trainer/tests/CMakeLists.txt
index 22e07bd0e98a4cd36e6ed5860bcff0d4ae7cb1d2..c5c76a030d9e5f1deed63454b408442954ef5eae 100644
--- a/paddle/trainer/tests/CMakeLists.txt
+++ b/paddle/trainer/tests/CMakeLists.txt
@@ -1,11 +1,3 @@
-################# test_Prediction ######################
-add_unittest_without_exec(test_Prediction
-    test_Prediction.cpp)
-add_test(NAME test_Prediction
-  COMMAND ${PROJ_ROOT}/paddle/.set_python_path.sh -d ${PROJ_ROOT}/python
-        ${CMAKE_CURRENT_BINARY_DIR}/test_Prediction --merger=${CMAKE_CURRENT_BINARY_DIR}/../paddle_merge_model
-    WORKING_DIRECTORY ${PROJ_ROOT}/paddle/)
-
 ################# test_Compare ############################
 add_unittest_without_exec(test_Compare
     test_Compare.cpp)
diff --git a/paddle/trainer/tests/test_Prediction.cpp b/paddle/trainer/tests/test_Prediction.cpp
deleted file mode 100644
index 0c79404eee1c0902c5c8e8eefd139da3da584636..0000000000000000000000000000000000000000
--- a/paddle/trainer/tests/test_Prediction.cpp
+++ /dev/null
@@ -1,174 +0,0 @@
-/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserve.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License. */
-
-#include <gtest/gtest.h>
-
-#include "paddle/trainer/Trainer.h"
-
-#include <paddle/utils/PythonUtil.h>
-
-DECLARE_string(config);
-DECLARE_string(config_args);
-DEFINE_string(merger,
-              "./paddle_merge_model",
-              "path to paddle_merge_model binary");
-
-using namespace paddle;  // NOLINT
-using namespace std;     // NOLINT
-
-static const string& configFile = "trainer/tests/sample_trainer_config.conf";
-static const string& mergedModelFile = "./test_model_file";
-static const string& modelDir = "./test_model_dir";
-
-void checkBuffer(real* vec1, real* vec2, size_t len) {
-  for (size_t i = 0; i < len; i++) {
-    EXPECT_EQ(vec1[i], vec2[i]) << "vec1:" << vec1[i] << " vec2:" << vec2[i];
-  }
-}
-
-void checkParameters(vector<ParameterPtr> A, vector<ParameterPtr> B) {
-  CHECK_EQ(B.size(), A.size()) << "parameter size not equal";
-  for (size_t i = 0; i < A.size(); i++) {
-    auto vec1 = A[i]->getBuf(PARAMETER_VALUE);
-    auto vec2 = B[i]->getBuf(PARAMETER_VALUE);
-    CHECK_EQ(vec1->useGpu_, vec2->useGpu_) << "use gpu not equal";
-    CHECK_EQ(vec1->getSize(), vec2->getSize()) << "size not equal";
-
-    if (vec1->useGpu_ == false) {
-      checkBuffer(vec1->getData(), vec2->getData(), vec1->getSize());
-    } else {
-      VectorPtr cpuVec1 = Vector::create(vec1->getSize(), false);
-      VectorPtr cpuVec2 = Vector::create(vec2->getSize(), false);
-      cpuVec1->copyFrom(*vec1, HPPL_STREAM_DEFAULT);
-      cpuVec2->copyFrom(*vec2, HPPL_STREAM_DEFAULT);
-      hl_stream_synchronize(HPPL_STREAM_DEFAULT);
-      checkBuffer(cpuVec1->getData(), cpuVec2->getData(), cpuVec1->getSize());
-    }
-  }
-}
-
-TEST(GradientMachine, create) {
-#ifdef PADDLE_ONLY_CPU
-  FLAGS_use_gpu = false;
-#endif
-  mkDir(modelDir.c_str());
-  FLAGS_config = configFile;
-  FLAGS_config_args = "with_cost=False";
-  auto config = TrainerConfigHelper::createFromFlagConfig();
-
-  // save model to directory
-  unique_ptr<GradientMachine> gradientMachine1(
-      GradientMachine::create(*config));
-  gradientMachine1->saveParameters(modelDir);
-  Trainer trainer;
-  trainer.init(config);
-  ParameterUtil* paramUtil = trainer.getParameterUtilPtr();
-  if (paramUtil != NULL) {
-    paramUtil->saveConfigWithPath(modelDir);
-  }
-
-  // create a different GradientMachine
-  unique_ptr<GradientMachine> gradientMachine2(
-      GradientMachine::create(*config));
-  gradientMachine2->randParameters();
-
-  // merge config and model to one file
-  string cmd = FLAGS_merger + " --model_dir=" + modelDir +
-               " --config_args=with_cost=False" + " --model_file=" +
-               mergedModelFile;
-  LOG(INFO) << cmd;
-  int ret = system(cmd.c_str());
-  EXPECT_EQ(0, ret);
-  if (ret) {
-    return;
-  }
-
-  // create GradientMachine from the merged model
-  DataConfig dataConfig;
-  unique_ptr<GradientMachine> gradientMachine3(
-      GradientMachine::create(mergedModelFile, &dataConfig));
-  CHECK(gradientMachine3);
-  EXPECT_EQ(dataConfig.type(), "simple");
-  EXPECT_EQ(dataConfig.feat_dim(), 3);
-
-  // compare the parameters of GradientMachine and GradientMachine3
-  std::vector<ParameterPtr> paraMachine1 = gradientMachine1->getParameters();
-  std::vector<ParameterPtr> paraMachine3 = gradientMachine3->getParameters();
-  checkParameters(paraMachine1, paraMachine3);
-
-  // Test that the GradientMachine created from the merged model
-  // is same as the original one.
-  vector<Argument> inArgs(1);
-  vector<Argument> outArgs;
-
-  int inputDim = 3;
-  int numSamples = 2;
-  CpuMatrix cpuInput(numSamples, inputDim);
-  for (int i = 0; i < numSamples; ++i) {
-    for (int j = 0; j < inputDim; ++j) {
-      cpuInput.getData()[i * inputDim + j] =
-          rand() / (real)RAND_MAX;  // NOLINT TODO(yuyang): use rand_r
-    }
-  }
-  MatrixPtr input = Matrix::create(numSamples,
-                                   inputDim,
-                                   /* trans */ false,
-                                   FLAGS_use_gpu);
-  input->copyFrom(cpuInput);
-  inArgs[0].value = input;
-  gradientMachine1->forward(inArgs, &outArgs, PASS_TEST);
-  EXPECT_EQ((size_t)1, outArgs.size());
-
-  vector<Argument> outArgs2;
-  gradientMachine2->forward(inArgs, &outArgs2, PASS_TEST);
-  CpuMatrix out1(outArgs[0].value->getHeight(), outArgs[0].value->getWidth());
-  CpuMatrix out2(outArgs2[0].value->getHeight(), outArgs2[0].value->getWidth());
-  out1.copyFrom(*outArgs[0].value);
-  out2.copyFrom(*outArgs2[0].value);
-  for (size_t i = 0; i < out1.getHeight() * out1.getWidth(); i++) {
-    EXPECT_NE(out1.getData()[i], out2.getData()[i]);
-  }
-
-  gradientMachine3->forward(inArgs, &outArgs2, PASS_TEST);
-  out2.copyFrom(*outArgs2[0].value);
-  checkBuffer(
-      out1.getData(), out2.getData(), out2.getHeight() * out2.getWidth());
-
-  cmd = " rm -rf " + modelDir + "/*";
-  LOG(INFO) << "cmd " << cmd;
-  ret = system(cmd.c_str());
-  EXPECT_EQ(0, ret);
-  if (ret) {
-    return;
-  }
-
-  cmd = " rm -rf " + mergedModelFile;
-  LOG(INFO) << "cmd " << cmd;
-  ret = system(cmd.c_str());
-  EXPECT_EQ(0, ret);
-  if (ret) {
-    return;
-  }
-
-  // clean up
-  rmDir(modelDir.c_str());
-  remove(mergedModelFile.c_str());
-}
-
-int main(int argc, char** argv) {
-  initMain(argc, argv);
-  initPython(argc, argv);
-  testing::InitGoogleTest(&argc, argv);
-  return RUN_ALL_TESTS();
-}