From 6a15698fcf1f998e2676639efd2f77ebc4457c01 Mon Sep 17 00:00:00 2001
From: rensilin
Date: Thu, 15 Aug 2019 21:06:56 +0800
Subject: [PATCH] create_programs

Change-Id: I0dfc88c0026abca93be948f2dd8bfdfb0a709d1f
---
 BCLOUD                                        | 19 ++++++++---
 .../so => lib}/libpaddle_fluid_avx_mklml.so   | Bin
 .../feed/scripts/create_programs.py           | 30 +++++++++++-------
 .../custom_trainer/feed/scripts/example.py    |  6 ++--
 .../feed/unit_test/test_create_programs.cc    |  8 ++++-
 .../feed/unit_test/test_datareader_omp.cc     | 18 +++++++++--
 6 files changed, 59 insertions(+), 22 deletions(-)
 rename {paddle/fluid/train/custom_trainer/feed/so => lib}/libpaddle_fluid_avx_mklml.so (100%)

diff --git a/BCLOUD b/BCLOUD
index 151cbed3..95a53d27 100644
--- a/BCLOUD
+++ b/BCLOUD
@@ -78,7 +78,6 @@ NEED_OUTPUT("baidu/third-party/mklml")
 NEED_OUTPUT("baidu/third-party/openmpi")
 OUTPUT('paddle/fluid/train/custom_trainer/feed/conf', '$OUT')
 OUTPUT('paddle/fluid/train/custom_trainer/feed/scripts', '$OUT')
-OUTPUT('paddle/fluid/train/custom_trainer/feed/so', '$OUT')
 
 def UT_FILE(filename):
     UT_DIR = 'paddle/fluid/train/custom_trainer/feed/unit_test'
@@ -92,9 +91,21 @@ CPPFLAGS_STR = '-DHPPL_STUB_FUNC -DLAPACK_FOUND -DPADDLE_DISABLE_PROFILER -DPADD
 CFLAGS_STR = '-m64 -fPIC -fno-omit-frame-pointer -Werror -Wall -Wextra -Wnon-virtual-dtor -Wdelete-non-virtual-dtor -Wno-unused-parameter -Wno-unused-function -Wno-error=literal-suffix -Wno-error=sign-compare -Wno-error=unused-local-typedefs -Wno-error=maybe-uninitialized -fopenmp -mavx -O0 -DNDEBUG '
 CXXFLAGS_STR = '-std=c++11 ' + CFLAGS_STR
 
-Application('feed_trainer', Sources('paddle/fluid/train/custom_trainer/feed/main.cc', custom_trainer_src), CppFlags(CPPFLAGS_STR), CFlags(CFLAGS_STR), CxxFlags(CXXFLAGS_STR), Libs(src_libs=['paddle/fluid/train/custom_trainer/feed/so/libpaddle_fluid_avx_mklml.so']), Libs(module='baidu/third-party/openmpi', libs=['libmpi.so', 'libmpi_cxx.so', 'libopen-pal.so', 'libopen-rte.so']))
+SharedLibrary("paddle_fluid_avx_mklml", PreBuilt(True))
+application_args = [
+    CppFlags(CPPFLAGS_STR),
+    CFlags(CFLAGS_STR),
+    CxxFlags(CXXFLAGS_STR),
+    Libs(libs=['libpaddle_fluid_avx_mklml.so']),
+    Libs(module='baidu/third-party/openmpi', libs=['libmpi.so', 'libmpi_cxx.so', 'libopen-pal.so', 'libopen-rte.so']),
+]
+
+StaticLibrary("feed_trainer", Sources(custom_trainer_src), application_args)
+
+Application('feed_trainer', Sources('paddle/fluid/train/custom_trainer/feed/main.cc'), WholeArchives("$OUT/lib/libfeed_trainer.a"), application_args)
 
 #feed unit test
-UT_MAIN = UT_FILE('main.cc')
-#UTApplication('unit_test', Sources(UT_MAIN, GLOB(UT_FILE('test_*.cc')), custom_trainer_src), CppFlags(CPPFLAGS_STR), CFlags(CFLAGS_STR), CxxFlags(CXXFLAGS_STR), Libs(src_libs=['paddle/fluid/train/custom_trainer/feed/so/libpaddle_fluid_avx_mklml.so']))
+
+# bug: shared libraries cannot be found when running on the server
+UTApplication('unit_test', UTOnServer(False), Sources(UT_FILE('main.cc'), GLOB(UT_FILE('test_*.cc'))), WholeArchives("$OUT/lib/libfeed_trainer.a"), application_args)
 
diff --git a/paddle/fluid/train/custom_trainer/feed/so/libpaddle_fluid_avx_mklml.so b/lib/libpaddle_fluid_avx_mklml.so
similarity index 100%
rename from paddle/fluid/train/custom_trainer/feed/so/libpaddle_fluid_avx_mklml.so
rename to lib/libpaddle_fluid_avx_mklml.so
diff --git a/paddle/fluid/train/custom_trainer/feed/scripts/create_programs.py b/paddle/fluid/train/custom_trainer/feed/scripts/create_programs.py
index d22e40b3..6f3e16e4 100644
--- a/paddle/fluid/train/custom_trainer/feed/scripts/create_programs.py
+++ b/paddle/fluid/train/custom_trainer/feed/scripts/create_programs.py
@@ -24,7 +24,7 @@ def inference_warpper(filename):
 
     Returns:
         list: inputs and
-        Variable: ctr_output
+        list: outputs
     """
 
     with open(filename, 'r') as f:
@@ -53,17 +53,23 @@ def main(argv):
     main_program = fluid.Program()
     startup_program = fluid.Program()
     with fluid.program_guard(main_program, startup_program):
-        inputs, ctr_output = inference_warpper(network_build_file)
+        inputs, outputs = inference_warpper(network_build_file)
 
         test_program = main_program.clone(for_test=True)
 
-        label_target = fluid.layers.data(name='label', shape=[1], dtype='float32')
+        labels = list()
+        losses = list()
+        for output in outputs:
+            label = fluid.layers.data(name='label_' + output.name, shape=output.shape, dtype='float32')
+            loss = fluid.layers.square_error_cost(input=output, label=label)
+            loss = fluid.layers.mean(loss, name='loss_' + output.name)
 
-        loss = fluid.layers.square_error_cost(input=ctr_output, label=label_target)
-        loss = fluid.layers.mean(loss, name='loss')
-
+            labels.append(label)
+            losses.append(loss)
+
+        loss_all = fluid.layers.sum(losses)
         optimizer = fluid.optimizer.SGD(learning_rate=1.0)
-        params_grads = optimizer.backward(loss)
+        params_grads = optimizer.backward(loss_all)
 
     if not os.path.exists(model_dir):
         os.mkdir(model_dir)
@@ -78,11 +84,11 @@ def main(argv):
             f.write(program.desc.serialize_to_string())
 
     model_desc_path = os.path.join(model_dir, 'model.yaml')
-    model_desc = dict()
-    model_desc['inputs'] = {var.name: var.shape for var in inputs}
-    model_desc['loss_name'] = loss.name
-    model_desc['label_name'] = label_target.name
-    model_desc['ctr_output_name'] = ctr_output.name
+    model_desc = {
+        'inputs': [{"name": var.name, "shape": var.shape} for var in inputs],
+        'outputs': [{"name": var.name, "shape": var.shape, "label_name": label.name, "loss_name": loss.name} for var, label, loss in zip(outputs, labels, losses)],
+        'loss_all': loss_all.name,
+    }
 
     with open(model_desc_path, 'w') as f:
         yaml.safe_dump(model_desc, f, encoding='utf-8', allow_unicode=True)
diff --git a/paddle/fluid/train/custom_trainer/feed/scripts/example.py b/paddle/fluid/train/custom_trainer/feed/scripts/example.py
index 8a76a418..6773d036 100644
--- a/paddle/fluid/train/custom_trainer/feed/scripts/example.py
+++ b/paddle/fluid/train/custom_trainer/feed/scripts/example.py
@@ -15,7 +15,7 @@ def inference():
 
     Returns:
         list: inputs and
-        Variable: ctr_output
+        list: outputs
     """
     # TODO: build network here
     cvm_input = fluid.layers.data(name='cvm_input', shape=[4488], dtype='float32')
@@ -29,5 +29,5 @@ def inference():
     net = fluid.layers.fc(net, 128, act='relu')
     net = fluid.layers.fc(net, 128, act='relu')
 
-    ctr_output = fluid.layers.fc(net, 1, act='sigmoid', name='ctr_output')
-    return [cvm_input], ctr_output
+    ctr_output = fluid.layers.fc(net, 1, act='sigmoid', name='ctr')
+    return [cvm_input], [ctr_output]
diff --git a/paddle/fluid/train/custom_trainer/feed/unit_test/test_create_programs.cc b/paddle/fluid/train/custom_trainer/feed/unit_test/test_create_programs.cc
index 4f03977d..0a6bf0c2 100644
--- a/paddle/fluid/train/custom_trainer/feed/unit_test/test_create_programs.cc
+++ b/paddle/fluid/train/custom_trainer/feed/unit_test/test_create_programs.cc
@@ -28,6 +28,7 @@ namespace custom_trainer {
 namespace feed {
 namespace {
+const char feed_path[] = "paddle/fluid/train/custom_trainer/feed";
 const char test_data_dir[] = "test_data";
 const char main_program_path[] = "test_data/main_program";
 const char startup_program_path[] = "test_data/startup_program";
 
@@ -39,7 +40,12 @@ class CreateProgramsTest : public testing::Test
 public:
     static void SetUpTestCase()
    {
-        shell_execute(string::format_string("python scripts/create_programs.py scripts/example.py %s", test_data_dir));
+        std::unique_ptr<FileSystem> fs(CREATE_CLASS(FileSystem, "LocalFileSystem"));
+        if (fs->exists("./scripts/create_programs.py")) {
+            shell_execute(string::format_string("python ./scripts/create_programs.py ./scripts/example.py %s", test_data_dir));
+        } else if (fs->exists(string::format_string("%s/scripts/create_programs.py", feed_path))) {
+            shell_execute(string::format_string("python %s/scripts/create_programs.py %s/scripts/example.py %s", feed_path, feed_path, test_data_dir));
+        }
     }
 
     static void TearDownTestCase()
diff --git a/paddle/fluid/train/custom_trainer/feed/unit_test/test_datareader_omp.cc b/paddle/fluid/train/custom_trainer/feed/unit_test/test_datareader_omp.cc
index 59e6e2cf..8ac7874e 100644
--- a/paddle/fluid/train/custom_trainer/feed/unit_test/test_datareader_omp.cc
+++ b/paddle/fluid/train/custom_trainer/feed/unit_test/test_datareader_omp.cc
@@ -91,6 +91,14 @@ public:
         return true;
     }
 
+    static void read_all(framework::Channel<DataItem>& channel, std::vector<DataItem>& items) {
+        framework::ChannelReader<DataItem> reader(channel.get());
+        DataItem data_item;
+        while (reader >> data_item) {
+            items.push_back(std::move(data_item));
+        }
+    }
+
     static bool is_same_with_std_items(const std::vector<DataItem>& items) {
         return is_same(items, std_items);
     }
@@ -136,7 +144,7 @@ TEST_F(DataReaderOmpTest, LineDataReaderSingleThread) {
         ASSERT_EQ(0, data_reader->read_all(test_data_dir, channel));
 
         std::vector<DataItem> items;
-        channel->ReadAll(items);
+        read_all(channel, items);
 
         if (is_same_with_std_items(items)) {
             ++same_count;
@@ -183,7 +191,7 @@ TEST_F(DataReaderOmpTest, LineDataReaderMuiltThread) {
             ASSERT_EQ(0, data_reader->read_all(test_data_dir, channel));
 
             std::vector<DataItem> items;
-            channel->ReadAll(items);
+            read_all(channel, items);
 
             if (is_same_with_std_items(items)) {
                 ++same_count;
@@ -195,6 +203,12 @@ TEST_F(DataReaderOmpTest, LineDataReaderMuiltThread) {
 
             if (is_same_with_sorted_std_items(items)) {
                 ++sort_same_count;
+            } else {
+                std::string items_str = "";
+                for (const auto& item: items) {
+                    items_str.append(item.id);
+                }
+                VLOG(2) << "items: " << items_str;
             }
         }
 
-- 
GitLab
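
Illustrative sketch (not part of the patch): loading the reworked model.yaml that create_programs.py now writes. It assumes a model directory already generated by the script (for example the test_data directory that CreateProgramsTest passes in) and relies only on the keys written above: 'inputs', 'outputs' (each entry carrying name, shape, label_name and loss_name) and 'loss_all'. The helper name load_model_desc is made up for this example.

# Illustrative only, not part of the patch. Assumes model_dir was produced by
# scripts/create_programs.py as modified above.
from __future__ import print_function
import os
import sys
import yaml

def load_model_desc(model_dir):
    # model.yaml sits next to the serialized main/startup/test programs
    with open(os.path.join(model_dir, 'model.yaml'), 'r') as f:
        return yaml.safe_load(f)

if __name__ == '__main__':
    desc = load_model_desc(sys.argv[1] if len(sys.argv) > 1 else 'test_data')
    for var in desc['inputs']:
        print('input :', var['name'], var['shape'])
    for out in desc['outputs']:
        # every output now carries its own label and loss entry, matching the
        # per-output label_<name>/loss_<name> variables built in the train program
        print('output:', out['name'], out['shape'],
              'label =', out['label_name'], 'loss =', out['loss_name'])
    print('total loss variable:', desc['loss_all'])

Keeping each output paired with its label and loss name (instead of the old single ctr_output_name/label_name/loss_name fields) lets a consumer iterate over an arbitrary number of outputs while still fetching one combined loss through loss_all.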