diff --git a/python/paddle/fluid/tests/unittests/CMakeLists.txt b/python/paddle/fluid/tests/unittests/CMakeLists.txt index d3a62ae9851a6aabe4859946ea65ffece9d39000..18c0b12896f4863f77fe58ed3993e8c9fa34d08c 100755 --- a/python/paddle/fluid/tests/unittests/CMakeLists.txt +++ b/python/paddle/fluid/tests/unittests/CMakeLists.txt @@ -147,17 +147,6 @@ if(WIN32) list(REMOVE_ITEM TEST_OPS test_ops_nms) list(REMOVE_ITEM TEST_OPS test_trt_convert_preln_residual_bias) endif() - -list(REMOVE_ITEM TEST_OPS test_fleet_checkpoint) -list(REMOVE_ITEM TEST_OPS test_auto_checkpoint) -list(REMOVE_ITEM TEST_OPS test_auto_checkpoint1) -list(REMOVE_ITEM TEST_OPS test_auto_checkpoint2) -list(REMOVE_ITEM TEST_OPS test_auto_checkpoint3) -list(REMOVE_ITEM TEST_OPS test_auto_checkpoint_multiple) -list(REMOVE_ITEM TEST_OPS test_auto_checkpoint_dist_basic) -list(REMOVE_ITEM TEST_OPS test_hdfs1) -list(REMOVE_ITEM TEST_OPS test_hdfs2) -list(REMOVE_ITEM TEST_OPS test_hdfs3) list(REMOVE_ITEM TEST_OPS test_checkpoint_saver) if(APPLE OR WIN32) @@ -687,11 +676,6 @@ if(WITH_DISTRIBUTE) endif() endif() - if(NOT WITH_DGC) - # if not with dgc, must close all dgc tests - list(REMOVE_ITEM DIST_TEST_OPS "test_dist_mnist_dgc_nccl") - list(REMOVE_ITEM DIST_TEST_OPS "test_dist_se_resnext_dgc") - endif() if(NOT APPLE) if(WITH_GPU OR WITH_ROCM) @@ -859,87 +843,6 @@ if(NOT WIN32) set_tests_properties(test_parallel_executor_fetch_feed PROPERTIES TIMEOUT 450) endif() -if(WITH_DISTRIBUTE - AND NOT APPLE - AND NOT WIN32) - py_test_modules(test_fleet_checkpoint MODULES test_fleet_checkpoint) - set_tests_properties(test_fleet_checkpoint PROPERTIES TIMEOUT 200) - set_tests_properties(test_fleet_checkpoint - PROPERTIES LABELS "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_auto_checkpoint - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_auto_checkpoint1 - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - 
bash_test_modules( - test_auto_checkpoint2 - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_auto_checkpoint3 - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_auto_checkpoint_multiple - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_auto_checkpoint_dist_basic - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_hdfs1 - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_hdfs2 - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") - bash_test_modules( - test_hdfs3 - START_BASH - dist_test.sh - TIMEOUT - 200 - LABELS - "RUN_TYPE=EXCLUSIVE:NIGHTLY") -endif() - add_subdirectory(sequence) add_subdirectory(dygraph_to_static) add_subdirectory(rnn) diff --git a/python/paddle/fluid/tests/unittests/collective/README.md b/python/paddle/fluid/tests/unittests/collective/README.md index f34a177570cd170579045cd5d9798f5824a6dce7..2370ce07e05b4a77c0822e3e986e094030cc04b2 100644 --- a/python/paddle/fluid/tests/unittests/collective/README.md +++ b/python/paddle/fluid/tests/unittests/collective/README.md @@ -4,15 +4,15 @@ ### step 2. Edit the `testslist.csv` file Add an item like test_c_identity in testslist.csv and specify the properties for the new unit test - the properties are the following: + the properties are the following: * `name`: the test's name * `os`: The supported operator system, ignoring case. If the test run in multiple operator systems, use ";" to split systems, for example, `apple;linux` means the test runs on both Apple and Linux. The supported values are `linux`,`win32` and `apple`. If the value is empty, this means the test runs on all opertaor systems. * `arch`: the device's architecture. 
similar to `os`, multiple valuse ars splited by ";" and ignoring case. The supported architectures are `gpu`, `xpu`, `ASCEND`, `ASCEND_CL` and `rocm`. -* `timeout`: timeout of a unittest, whose unit is second. +* `timeout`: timeout of a unittest, whose unit is second. Blank means default. * `run_type`: run_type of a unittest. Supported values are `NIGHTLY`, `EXCLUSIVE`, `CINN`, `DIST`, `GPUPS`, `INFER`, `EXCLUSIVE:NIGHTLY`, `DIST:NIGHTLY`,which are case-insensitive. -* `launcher`: the test launcher.Supported values are test_runner.py, dist_test.sh and custom scripts' name. -* `num_port`: the number os port used in a distributed unit test -* `run_serial`: whether in serial mode. the value can be 1 or 0.Default (empty) is 0. +* `launcher`: the test launcher. Supported values are test_runner.py, dist_test.sh and custom scripts' name. Blank means test_runner.py. +* `num_port`: the number of ports used in a distributed unit test. Blank means an automatically distributed port. +* `run_serial`: whether in serial mode. the value can be 1 or 0. Default (empty) is 0. Blank means default. * `ENVS`: required environments. multiple envirenmonts are splited by ";". * `conditions`: extra required conditions for some tests. The value is a list of boolean expression in cmake programmer, splited with ";". For example, the value can be `WITH_DGC;NOT WITH_NCCL` or `WITH_NCCL;${NCCL_VERSION} VERSION_GREATER_EQUAL 2212`,The relationship between these expressions is a conjunction. 
diff --git a/python/paddle/fluid/tests/unittests/collective/fleet/CMakeLists.txt b/python/paddle/fluid/tests/unittests/collective/fleet/CMakeLists.txt index 1d1555839a0fd2d19625988578864226589cf77b..6f4363c906e5f448dc0902b876d2a3c93ac9d041 100644 --- a/python/paddle/fluid/tests/unittests/collective/fleet/CMakeLists.txt +++ b/python/paddle/fluid/tests/unittests/collective/fleet/CMakeLists.txt @@ -94,7 +94,7 @@ if((WITH_ROCM) AND LOCAL_ALL_PLAT) LABELS "RUN_TYPE=DIST" ENVS - "PADDLE_DIST_UT_PORT=21206;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + "PADDLE_DIST_UT_PORT=21204;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" ) set_tests_properties(test_parallel_dygraph_transformer PROPERTIES RUN_SERIAL 1) @@ -574,7 +574,7 @@ if((WITH_GPU OR WITH_ASCEND OR WITH_ASCEND_CL ) - AND (WIN32 OR LINUX)) + AND (LINUX OR WIN32)) py_test_modules( test_fleet_hybrid_meta_optimizer MODULES test_fleet_hybrid_meta_optimizer ENVS @@ -619,7 +619,7 @@ if((WITH_ROCM) AND LOCAL_ALL_PLAT) LABELS "RUN_TYPE=DIST" ENVS - "PADDLE_DIST_UT_PORT=21258;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + "PADDLE_DIST_UT_PORT=21256;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" ) set_tests_properties(test_parallel_dygraph_sparse_embedding PROPERTIES TIMEOUT "200" RUN_SERIAL 1) @@ -660,7 +660,7 @@ if((WITH_ROCM) AND LOCAL_ALL_PLAT) LABELS "RUN_TYPE=DIST" ENVS - "PADDLE_DIST_UT_PORT=21262;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + "PADDLE_DIST_UT_PORT=21260;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" ) set_tests_properties(test_parallel_dygraph_sparse_embedding_over_height PROPERTIES TIMEOUT "350" RUN_SERIAL 1) @@ -863,3 +863,117 @@ if(WITH_NCCL OR WITH_RCCL) endif() endif() endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_auto_checkpoint + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + 
"PADDLE_DIST_UT_PORT=21380;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_auto_checkpoint PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_auto_checkpoint1 + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21382;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_auto_checkpoint1 PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_auto_checkpoint2 + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21384;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_auto_checkpoint2 PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_auto_checkpoint3 + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21386;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_auto_checkpoint3 PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_auto_checkpoint_multiple + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21388;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_auto_checkpoint_multiple PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_auto_checkpoint_dist_basic + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21390;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_auto_checkpoint_dist_basic PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_hdfs1 + 
START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21392;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_hdfs1 PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_hdfs2 + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21394;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_hdfs2 PROPERTIES TIMEOUT "200") +endif() +if(LOCAL_ALL_ARCH AND (LINUX)) + bash_test_modules( + test_hdfs3 + START_BASH + ../../dist_test.sh + LABELS + "RUN_TYPE=EXCLUSIVE:NIGHTLY" + ENVS + "PADDLE_DIST_UT_PORT=21396;http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python" + ) + set_tests_properties(test_hdfs3 PROPERTIES TIMEOUT "200") +endif() +if((WITH_GPU OR WITH_ROCM) AND (LINUX)) + py_test_modules( + test_fleet_checkpoint MODULES test_fleet_checkpoint ENVS + "http_proxy=;https_proxy=;PYTHONPATH=../..:${PADDLE_BINARY_DIR}/python") + set_tests_properties(test_fleet_checkpoint PROPERTIES TIMEOUT "200") +endif() diff --git a/python/paddle/fluid/tests/unittests/test_auto_checkpoint.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_auto_checkpoint.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint.py diff --git a/python/paddle/fluid/tests/unittests/test_auto_checkpoint1.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint1.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_auto_checkpoint1.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint1.py diff --git a/python/paddle/fluid/tests/unittests/test_auto_checkpoint2.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint2.py 
similarity index 100% rename from python/paddle/fluid/tests/unittests/test_auto_checkpoint2.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint2.py diff --git a/python/paddle/fluid/tests/unittests/test_auto_checkpoint3.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint3.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_auto_checkpoint3.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint3.py diff --git a/python/paddle/fluid/tests/unittests/test_auto_checkpoint_dist_basic.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint_dist_basic.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_auto_checkpoint_dist_basic.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint_dist_basic.py diff --git a/python/paddle/fluid/tests/unittests/test_auto_checkpoint_multiple.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint_multiple.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_auto_checkpoint_multiple.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_auto_checkpoint_multiple.py diff --git a/python/paddle/fluid/tests/unittests/test_fleet_checkpoint.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_fleet_checkpoint.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_fleet_checkpoint.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_fleet_checkpoint.py diff --git a/python/paddle/fluid/tests/unittests/test_hdfs1.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_hdfs1.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_hdfs1.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_hdfs1.py diff --git a/python/paddle/fluid/tests/unittests/test_hdfs2.py 
b/python/paddle/fluid/tests/unittests/collective/fleet/test_hdfs2.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_hdfs2.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_hdfs2.py diff --git a/python/paddle/fluid/tests/unittests/test_hdfs3.py b/python/paddle/fluid/tests/unittests/collective/fleet/test_hdfs3.py similarity index 100% rename from python/paddle/fluid/tests/unittests/test_hdfs3.py rename to python/paddle/fluid/tests/unittests/collective/fleet/test_hdfs3.py diff --git a/python/paddle/fluid/tests/unittests/collective/fleet/testslist.csv b/python/paddle/fluid/tests/unittests/collective/fleet/testslist.csv index 71ba94a02b69826ab0ef3fd9646af765588e8ff6..a81cb4e2312ce61f24ba64dc00f64a02a3e36a37 100644 --- a/python/paddle/fluid/tests/unittests/collective/fleet/testslist.csv +++ b/python/paddle/fluid/tests/unittests/collective/fleet/testslist.csv @@ -1,6 +1,6 @@ name,os,arch,timeout,run_type,launcher,num_port,run_serial,envs,conditions test_fleet_sharding_meta_optimizer,,GPU;XPU;ASCEND;ASCEND_CL,350,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_static_mp_layers,linux;win32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_static_mp_layers,LINUX;WIN32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_dgc_op,,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_DGC test_dgc_optimizer,,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_DGC test_parallel_margin_cross_entropy,,GPU,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_NCCL @@ -32,21 +32,21 @@ test_recv_save_op,,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH test_communicator_sync,,,,DIST,test_runner.py,2,1,FLAGS_communicator_send_queue_size=1;FLAGS_communicator_max_merge_var_num=1;http_proxy=;https_proxy=;PYTHONPATH=../.., 
test_fleet_pipeline_meta_optimizer,,GPU;XPU;ASCEND;ASCEND_CL,,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_fleet_gradient_merge_meta_optimizer,,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_amp_init,linux;win32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_amp_init,LINUX;WIN32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_dygraph_sharding_optimizer_stage2,,,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_meta_optimizer_base,linux;win32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_meta_optimizer_base,LINUX;WIN32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_fleet_raw_program_meta_optimizer,,GPU;XPU;ASCEND;ASCEND_CL,,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_parallel_dygraph_sharding_parallel,,,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_parallel_dygraph_tensor_parallel,,,200,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_dygraph_group_sharded_api_for_eager,,,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_distributed_strategy,linux;win32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_distributed_strategy,LINUX;WIN32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_fleet_dgc_meta_optimizer,,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_DGC test_parallel_dygraph_unused_variables,,,350,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_lamb_meta_optimizer,linux,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., 
+test_fleet_lamb_meta_optimizer,LINUX,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_dgc_momentum_op,,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_DGC test_parallel_dygraph_no_sync_gradient_check,,,60,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_fleet_pipeline_meta_optimizer_with_recompute,,GPU;XPU;ASCEND;ASCEND_CL,,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_hybrid_meta_optimizer,WIN32;LINUX,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_hybrid_meta_optimizer,LINUX;WIN32,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_parallel_dygraph_qat,,,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_parallel_dygraph_sparse_embedding,,GPU,200,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_NCCL;${NCCL_VERSION} VERSION_GREATER_EQUAL 2212 test_parallel_dygraph_sparse_embedding,,ROCM,200,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., @@ -55,11 +55,11 @@ test_parallel_dygraph_sparse_embedding_over_height,,GPU,150,DIST,../../dist_test test_parallel_dygraph_sparse_embedding_over_height,,ROCM,350,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_distributed_strategy,LINUX;APPLE,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_auto_parallel_parallelizer,,,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_recompute_meta_optimizer,linux;win32,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_recompute_meta_optimizer,LINUX;WIN32,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., 
test_dygraph_group_sharded_api,,,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., -test_fleet_private_function,linux;win32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_private_function,LINUX;WIN32,,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_new_group,,GPU;XPU;ASCEND;ASCEND_CL,,DIST,test_new_group.sh,2,1,http_proxy=;https_proxy=, -test_c_comm_init_op,linux,GPU;XPU;ASCEND;ASCEND_CL,120,DIST,test_c_comm_init_op.sh,2,1,http_proxy=;https_proxy=, +test_c_comm_init_op,LINUX,GPU;XPU;ASCEND;ASCEND_CL,120,DIST,test_c_comm_init_op.sh,2,1,http_proxy=;https_proxy=, test_ir_pass_pipeline,,,120,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_parallel_dygraph_mnist,,GPU;ROCM,200,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_parallel_dygraph_se_resnext,,GPU;ROCM,200,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., @@ -71,3 +71,13 @@ test_dygraph_recompute,,GPU;ROCM,,DIST,test_runner.py,2,1,http_proxy=;https_prox test_dygraph_recompute_for_eager,,GPU;ROCM,,DIST,test_runner.py,2,1,http_proxy=;https_proxy=;PYTHONPATH=../.., test_dist_mnist_dgc_nccl,,,,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_NCCL OR WITH_RCCL;WITH_DGC test_dist_se_resnext_dgc,,,,DIST,../../dist_test.sh,2,1,http_proxy=;https_proxy=;PYTHONPATH=../..,WITH_NCCL OR WITH_RCCL;WITH_DGC +test_auto_checkpoint,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_auto_checkpoint1,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_auto_checkpoint2,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_auto_checkpoint3,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., 
+test_auto_checkpoint_multiple,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_auto_checkpoint_dist_basic,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_hdfs1,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_hdfs2,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_hdfs3,LINUX,,200,EXCLUSIVE:NIGHTLY,../../dist_test.sh,2,,http_proxy=;https_proxy=;PYTHONPATH=../.., +test_fleet_checkpoint,LINUX,GPU;ROCM,200,EXCLUSIVE:NIGHTLY,test_runner.py,,,http_proxy=;https_proxy=;PYTHONPATH=../.., diff --git a/tools/gen_ut_cmakelists.py b/tools/gen_ut_cmakelists.py index b95739fed61043a598efefffd791b7cccf517839..fdf19fa497cedc029621cf12be588b17af44bdea 100644 --- a/tools/gen_ut_cmakelists.py +++ b/tools/gen_ut_cmakelists.py @@ -26,7 +26,7 @@ def _process_PYTHONPATH(pythonpath_option): return pythonpath_option -def process_envs(envs): +def _process_envs(envs): """ Desc: Input a str and output a str with the same function to specify some environment variables. @@ -50,6 +50,7 @@ def process_envs(envs): and the var can not contain space in either env names or values. However the var's format is '{p}'.""" + # if p starts with "PYTHONPATH=", then process python path if re.compile("^PYTHONPATH=").search(p): p = _process_PYTHONPATH(p) @@ -58,7 +59,7 @@ However the var's format is '{p}'.""" return ";".join(processed_envs) -def process_conditions(conditions): +def _process_conditions(conditions): """ Desc: Input condition expression in cmake grammer and return a string warpped by 'AND ()'. @@ -77,7 +78,7 @@ def process_conditions(conditions): return [c.strip() for c in conditions] -def proccess_archs(arch): +def _proccess_archs(arch): """ desc: Input archs options and warp it with 'WITH_', 'OR' and '()' in cmakelist grammer. 
@@ -103,7 +104,7 @@ def proccess_archs(arch): return arch -def process_os(os_): +def _process_os(os_): """ Desc: Input os options and output warpped options with 'OR' and '()' @@ -130,16 +131,16 @@ def process_os(os_): # check whether run_serial is 0, 1 or empty -def process_run_serial(run_serial): +def _process_run_serial(run_serial): rs = run_serial.strip() assert rs in ["1", "0", ""], \ f"""the value of run_serial must be one of 0, 1 or empty. But this value is {rs}""" if rs == "": - rs = "0" + return "" return rs -def file_with_extension(prefix, suffixes): +def _file_with_extension(prefix, suffixes): """ Desc: check whether test file exists. @@ -150,7 +151,7 @@ def file_with_extension(prefix, suffixes): return False -def process_name(name, curdir): +def _process_name(name, curdir): """ Desc: check whether name is with a legal format and check whther the test file exists. @@ -161,146 +162,330 @@ def process_name(name, curdir): f"""and the following substring must include at least one char of "0-9", "a-z", "A-Z" or "_".""" filepath_prefix = os.path.join(curdir, name) suffix = [".py", ".sh"] - assert file_with_extension(filepath_prefix, suffix), \ + assert _file_with_extension(filepath_prefix, suffix), \ f""" Please ensure the test file with the prefix '{filepath_prefix}' and one of the suffix {suffix} exists, because you specified a unittest named '{name}'""" return name -def process_run_type(run_type): +def _process_run_type(run_type): rt = run_type.strip() + # completely match one of the strings: 'NIGHTLY', 'EXCLUSIVE', 'CINN', 'DIST', 'GPUPS', 'INFER', 'EXCLUSIVE:NIGHTLY' and 'DIST:NIGHTLY' assert re.compile("^(NIGHTLY|EXCLUSIVE|CINN|DIST|GPUPS|INFER|EXCLUSIVE:NIGHTLY|DIST:NIGHTLY)$").search(rt), \ f""" run_type must be one of 'NIGHTLY', 'EXCLUSIVE', 'CINN', 'DIST', 'GPUPS', 'INFER', 'EXCLUSIVE:NIGHTLY' and 'DIST:NIGHTLY'""" \ f"""but the run_type is {rt}""" return rt -DIST_UT_PORT = 21200 - - -def process_dist_ut_port(port_num): - global DIST_UT_PORT - port = 
DIST_UT_PORT - assert port < 23000, "dist port is exahausted" - DIST_UT_PORT += int(port_num) - return port - +class DistUTPortManager(): + + def __init__(self): + self.dist_ut_port = 21200 + self.assigned_ports = dict() + self.last_test_name = "" + self.last_test_cmake_file = "" + self.no_cmake_dirs = [] + self.processed_dirs = set() + + def reset_current_port(self, port=None): + self.dist_ut_port = 21200 if port is None else port + + def get_currnt_port(self): + return self.dist_ut_port + + def gset_port(self, test_name, port): + ''' + Get and set a port for unit test named test_name. If the test has been already holding a port, return the port it holds. + Else assign the input port as a new port to the test. + ''' + if test_name not in self.assigned_ports: + self.assigned_ports[test_name] = port + self.dist_ut_port = max(self.dist_ut_port, + self.assigned_ports[test_name]) + return self.assigned_ports[test_name] + + def process_dist_port_num(self, port_num): + assert re.compile("^[0-9]+$").search(port_num) and int(port_num) > 0 or port_num.strip()=="", \ + f"""port_num must be foramt as a positive integer or empty, but this port_num is '{port_num}'""" + port_num = port_num.strip() + if len(port_num) == 0: + return 0 + port = self.dist_ut_port + assert port < 23000, "dist port is exhausted" + self.dist_ut_port += int(port_num) + return port + + def _init_dist_ut_ports_from_cmakefile(self, cmake_file_name): + ''' + Desc: + Find all signed ut ports in cmake_file and update the ASSIGNED_PORTS + and keep the DIST_UT_PORT max of all assigned ports + ''' + with open(cmake_file_name) as cmake_file: + # match lines including 'PADDLE_DIST_UT_PORT=' followed by a number + port_reg = re.compile("PADDLE_DIST_UT_PORT=[0-9]+") + lines = cmake_file.readlines() + for idx, line in enumerate(lines): + matched = port_reg.search(line) + if matched is None: + continue + p = matched.span() + port = int(line[p[0]:p[1]].split("=")[-1]) + + # find the test name which the port belongs to 
+ for k in range(idx, 0, -1): + if lines[k].strip() == "START_BASH": + break + name = lines[k - 1].strip() + + # match right test name format: the name must start with 'test_' followed by at least one char of + # '0-9', 'a-z', 'A-Z' or '_' + assert re.compile("^test_[0-9a-zA-Z_]+").search(name), \ + f'''we found a test for initial the latest dist_port but the test name '{name}' seems to be wrong + at line {k-1}, in file {cmake_file_name} + ''' + self.gset_port(name, port) + + # get the test_name which latest assigned port belongs to + if self.assigned_ports[name] == self.dist_ut_port: + self.last_test_name = name + self.last_test_cmake_file = cmake_file_name + + def parse_assigned_dist_ut_ports(self, current_work_dir, ignores, depth=0): + ''' + Desc: + get all assigned dist ports to keep port of unmodified test fixed. + ''' + if current_work_dir in self.processed_dirs: + return -def parse_line(line, curdir): + # if root(depth==0), convert the ignores to abs paths + if depth == 0: + self.processed_dirs.clear() + ignores = [os.path.abspath(i) for i in ignores] + + self.processed_dirs.add(current_work_dir) + contents = os.listdir(current_work_dir) + cmake_file = os.path.join(current_work_dir, "CMakeLists.txt") + csv = cmake_file.replace("CMakeLists.txt", 'testslist.csv') + + if os.path.isfile(csv) or os.path.isfile(cmake_file): + if current_work_dir not in ignores: + if os.path.isfile(cmake_file) and os.path.isfile(csv): + self._init_dist_ut_ports_from_cmakefile(cmake_file) + elif not os.path.isfile(cmake_file): + # put the directory which has csv but no cmake into NO_CMAKE_DIR_WARNING + self.no_cmake_dirs.append(current_work_dir) + + # recursively process the subdirectories + for c in contents: + c_path = os.path.join(current_work_dir, c) + if os.path.isdir(c_path): + self.parse_assigned_dist_ut_ports(c_path, ignores, + depth + 1) + + if depth == 0: + # After all directories are scanned and processed + # 1. 
Get the num_port of last added test and set DIST_UT_PORT+=num_port + # to guarantee the DIST_UT_PORT is not assined + # 2. Summary all the directories which include csv but no cmake and show an error + # if such a drectory exists + + # step 1 + if len(self.last_test_name) > 0 and len( + self.last_test_cmake_file) > 0: + with open( + self.last_test_cmake_file.replace( + "CMakeLists.txt", "testslist.csv")) as csv_file: + found = False + for line in csv_file.readlines(): + name, _, _, _, _, launcher, num_port, _, _, _ = line.strip( + ).split(",") + if name == self.last_test_name: + found = True + break + assert found, f"no such test named '{self.last_test_name}' in file '{self.last_test_cmake_file}'" + if launcher[-2:] == ".sh": + self.process_dist_port_num(num_port) + + # step 2 + err_msg = f"""==================[No Old CMakeLists.txt Error]================================== + Following directories has no CmakeLists.txt files: """ - Desc: - Input a line in csv file and output a string in cmake grammer, adding the specified test and setting its properties. - Example: - Input: "test_allreduce,linux,gpu;rocm,120,DIST,test_runner.py,20071,1,PYTHONPATH=..;http_proxy=;https_proxy=," - Output: - "if((WITH_GPU OR WITH_ROCM) AND (LINUX) ) - py_test_modules( - test_allreduce - MODULES - test_allreduce - ENVS - "PADDLE_DIST_UT_PORT=20071;PYTHONPATH=..:${PADDLE_BINARY_DIR}/python;http_proxy=;https_proxy=") - set_tests_properties(test_allreduce PROPERTIES TIMEOUT "120" RUN_SERIAL 1) - endif()" + for c in self.no_cmake_dirs: + err_msg += " " + c + "\n" + err_msg += """ + This may cause the dist ports different with the old version. + If the directories are newly created or there is no CMakeLists.txt before, or ignore this error, you + must specify the directories using the args option --ignore-cmake-dirs/-i. 
+ If you want to keep the dist ports of old tests unchanged, please ensure the old + verson CMakeLists.txt file existing before using the gen_ut_cmakelists tool to + generate new CmakeLists.txt files. + ==================================================================================== """ - - name, os_, archs, timeout, run_type, launcher, dist_ut_port, run_serial, envs, conditions = line.strip( - ).split(",") - - # name == "name" means the line being parsed is the header of the table - # we should skip this line and return empty here. - if name == "name": - return "" - name = process_name(name, curdir) - - envs = process_envs(envs) - conditions = process_conditions(conditions) - archs = proccess_archs(archs) - os_ = process_os(os_) - run_serial = process_run_serial(run_serial) - run_type = process_run_type(run_type) - - cmd = "" - - for c in conditions: - cmd += f"if ({c})\n" - - time_out_str = f'TIMEOUT "{timeout}"' if len(timeout.strip()) > 0 else '' - if launcher[-3:] == ".sh": - dist_ut_port = process_dist_ut_port(2) - cmd += f'''if({archs} AND {os_}) - bash_test_modules( - {name} - START_BASH - {launcher} - LABELS - "RUN_TYPE={run_type}" - ENVS - "PADDLE_DIST_UT_PORT={dist_ut_port};{envs}") - set_tests_properties({name} PROPERTIES {time_out_str} RUN_SERIAL {run_serial}) -endif() -''' - else: - cmd += f'''if({archs} AND {os_}) - py_test_modules( - {name} - MODULES - {name} - ENVS - "{envs}") - set_tests_properties({name} PROPERTIES {time_out_str} RUN_SERIAL {run_serial}) -endif() -''' - for _ in conditions: - cmd += f"endif()\n" - return cmd - - -PROCESSED_DIR = set() - - -def gen_cmakelists(current_work_dir): - print("procfessing dir:", current_work_dir) - if current_work_dir == "": - current_work_dir = "." 
- - contents = os.listdir(current_work_dir) - sub_dirs = [] - for c in contents: - c_path = os.path.join(current_work_dir, c) - if c_path in PROCESSED_DIR: - return - if os.path.isdir(c_path): - PROCESSED_DIR.add(c_path) + assert len(self.no_cmake_dirs) == 0, err_msg + + +class CMakeGenerator(): + + def __init__(self, current_dirs): + self.processed_dirs = set() + self.port_manager = DistUTPortManager() + self.current_dirs = current_dirs + + def prepare_dist_ut_port(self): + for c in self._find_root_dirs(): + self.port_manager.parse_assigned_dist_ut_ports( + c, ignores=args.ignore_cmake_dirs, depth=0) + + def parse_csvs(self): + for c in self.current_dirs: + c = os.path.abspath(c) + self._gen_cmakelists(c) + + def _find_root_dirs(self): + root_dirs = [] + # for each current directory, find its highest ancient directory (at least itself) + # which includes CMakeLists.txt or testslist.csv.txt in the filesys tree + for c in self.current_dirs: + c = os.path.abspath(c) + while True: + ppath = os.path.dirname(c) + if os.path.abspath(ppath) == os.path.abspath(c): + break + cmake = os.path.join(ppath, "CMakeLists.txt") + csv = os.path.join(ppath, "testslist.csv.txt") + if not (os.path.isfile(cmake) or os.path.isfile(csv)): + break + c = os.path.abspath(ppath) + if c not in root_dirs: + root_dirs.append(c) + return root_dirs + + def _parse_line(self, line, curdir): + """ + Desc: + Input a line in csv file and output a string in cmake grammer, adding the specified test and setting its properties. 
+ Example: + Input: "test_allreduce,linux,gpu;rocm,120,DIST,test_runner.py,20071,1,PYTHONPATH=..;http_proxy=;https_proxy=," + Output: + "if((WITH_GPU OR WITH_ROCM) AND (LINUX) ) + py_test_modules( + test_allreduce + MODULES + test_allreduce + ENVS + "PADDLE_DIST_UT_PORT=20071;PYTHONPATH=..:${PADDLE_BINARY_DIR}/python;http_proxy=;https_proxy=") + set_tests_properties(test_allreduce PROPERTIES TIMEOUT "120" RUN_SERIAL 1) + endif()" + """ + + name, os_, archs, timeout, run_type, launcher, num_port, run_serial, envs, conditions = line.strip( + ).split(",") + + # name == "name" means the line being parsed is the header of the table + # we should skip this line and return empty here. + if name == "name": + return "" + name = _process_name(name, curdir) + + envs = _process_envs(envs) + conditions = _process_conditions(conditions) + archs = _proccess_archs(archs) + os_ = _process_os(os_) + run_serial = _process_run_serial(run_serial) + run_type = _process_run_type(run_type) + + cmd = "" + + for c in conditions: + cmd += f"if ({c})\n" + + if launcher[-3:] == ".sh": + dist_ut_port = self.port_manager.process_dist_port_num(num_port) + dist_ut_port = self.port_manager.gset_port(name, dist_ut_port) + cmd += f'''if({archs} AND {os_}) + bash_test_modules( + {name} + START_BASH + {launcher} + LABELS + "RUN_TYPE={run_type}" + ENVS + "PADDLE_DIST_UT_PORT={dist_ut_port};{envs}")%s + endif() + ''' + else: + cmd += f'''if({archs} AND {os_}) + py_test_modules( + {name} + MODULES + {name} + ENVS + "{envs}")%s + endif() + ''' + time_out_str = f' TIMEOUT "{timeout}"' if len( + timeout.strip()) > 0 else '' + run_serial_str = f' RUN_SERIAL {run_serial}' if len( + run_serial) > 0 else '' + if len(time_out_str) > 0 or len(run_serial_str) > 0: + set_properties = f''' + set_tests_properties({name} PROPERTIES{time_out_str}{run_serial_str})''' + else: + set_properties = "" + cmd = cmd % set_properties + for _ in conditions: + cmd += f"endif()\n" + return cmd + + def _gen_cmakelists(self, 
current_work_dir, depth=0): + if depth == 0: + self.processed_dirs.clear() + print("procfessing dir:", current_work_dir) + if current_work_dir == "": + current_work_dir = "." + + contents = os.listdir(current_work_dir) + contents.sort() + sub_dirs = [] + for c in contents: + c_path = os.path.join(current_work_dir, c) + if c_path in self.processed_dirs: + return + if not os.path.isdir(c_path): + continue + self.processed_dirs.add(c_path) if os.path.isfile(os.path.join(current_work_dir, c, "testslist.csv")) \ or os.path.isfile(os.path.join(current_work_dir, c, "CMakeLists.txt")): - gen_cmakelists(os.path.join(current_work_dir, c)) + self._gen_cmakelists(os.path.join(current_work_dir, c), + depth + 1) sub_dirs.append(c) - if not os.path.isfile(os.path.join(current_work_dir, "testslist.csv")): - return - cmds = """# This file is generated by ${PADDLE_ROOT}/tools/gen_ut_cmakelists.py. -# Please don't modify this file manually. -# If you need to change unittests in this file, please modify testslist.csv in the current directory -# and then run the command `python3 ${PADDLE_ROOT}/tools/gen_ut_cmakelists.py -f ${CURRENT_DIRECTORY}/testslist.csv` -set(LOCAL_ALL_ARCH ON) -set(LOCAL_ALL_PLAT ON)\n""" - with open(f"{current_work_dir}/testslist.csv") as csv_file: - for i, line in enumerate(csv_file.readlines()): - try: - cmds += parse_line(line, current_work_dir) - except Exception as e: - print("===============PARSE LINE ERRORS OCCUR==========") - print(e) - print(f"[ERROR FILE]: {current_work_dir}/testslist.csv") - print(f"[ERROR LINE {i+1}]: {line.strip()}") - exit(1) - - for sub in sub_dirs: - cmds += f"add_subdirectory({sub})\n" - print(cmds, end="") - with open(f"{current_work_dir}/CMakeLists.txt", "w") as cmake_file: - print(cmds, end="", file=cmake_file) + if not os.path.isfile(os.path.join(current_work_dir, "testslist.csv")): + return + cmds = """# This file is generated by ${PADDLE_ROOT}/tools/gen_ut_cmakelists.py. + # Please don't modify this file manually. 
+ # If you need to change unittests in this file, please modify testslist.csv in the current directory + # and then run the command `python3 ${PADDLE_ROOT}/tools/gen_ut_cmakelists.py -f ${CURRENT_DIRECTORY}/testslist.csv` + set(LOCAL_ALL_ARCH ON) + set(LOCAL_ALL_PLAT ON)\n""" + with open(f"{current_work_dir}/testslist.csv") as csv_file: + for i, line in enumerate(csv_file.readlines()): + try: + cmds += self._parse_line(line, current_work_dir) + except Exception as e: + print("===============PARSE LINE ERRORS OCCUR==========") + print(e) + print(f"[ERROR FILE]: {current_work_dir}/testslist.csv") + print(f"[ERROR LINE {i+1}]: {line.strip()}") + exit(1) + + for sub in sub_dirs: + cmds += f"add_subdirectory({sub})\n" + print(cmds, end="") + with open(f"{current_work_dir}/CMakeLists.txt", "w") as cmake_file: + print(cmds, end="", file=cmake_file) if __name__ == "__main__": @@ -325,6 +510,16 @@ if __name__ == "__main__": help= "Input a list of dir paths including files named testslist.csv and output CmakeLists.txt in these directories respectly" ) + parser.add_argument( + "--ignore-cmake-dirs", + '-i', + type=str, + required=False, + default=[], + nargs='*', + help= + "To keep dist ports the same with old version cmake, old cmakelists.txt files are needed to parse dist_ports. If a directories are newly created and there is no cmakelists.txt file, the directory path must be specified by this option. The dirs are not recursive." + ) args = parser.parse_args() assert not (len(args.files) == 0 and len(args.dirpaths) @@ -340,5 +535,6 @@ if __name__ == "__main__": if len(args.dirpaths) >= 1: current_work_dirs = current_work_dirs + [d for d in args.dirpaths] - for c in current_work_dirs: - gen_cmakelists(c) + cmake_generator = CMakeGenerator(current_work_dirs) + cmake_generator.prepare_dist_ut_port() + cmake_generator.parse_csvs()