Unverified commit fa73c7f5 authored by Leo Chen, committed by GitHub

add enable_static() (#4879)

Parent 93c4daa4
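The hunks below apply one pattern across the repository: each script's `__main__` block gains an `import paddle` plus a `paddle.enable_static()` call before its real entry point runs, so fluid-style static-graph code keeps working once dynamic graph (imperative) mode becomes Paddle's default. A minimal sketch of that pattern follows; `train()` is a placeholder standing in for each script's actual entry function, not part of the diff.

```python
# Minimal sketch of the pattern added by this commit.
# train() is a hypothetical placeholder for each script's real entry function.
import paddle

def train():
    # fluid-style static-graph program construction and execution goes here
    pass

if __name__ == "__main__":
    # Paddle 2.x defaults to dynamic graph mode, so legacy static-graph
    # scripts must switch modes explicitly before building a program.
    paddle.enable_static()
    train()
```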
......@@ -87,4 +87,5 @@ def test():
if __name__ == '__main__':
test()
......@@ -145,4 +145,6 @@ def eval():
if __name__ == "__main__":
import paddle
paddle.enable_static()
eval()
......@@ -144,4 +144,6 @@ def eval():
if __name__ == "__main__":
import paddle
paddle.enable_static()
eval()
......@@ -60,4 +60,6 @@ class TestFarthestPointSamplingOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -53,4 +53,6 @@ class TestGatherPointOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -57,4 +57,6 @@ class TestGroupPointsOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -66,4 +66,6 @@ class TestQueryBallOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -63,4 +63,6 @@ class TestThreeInterpOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -76,4 +76,6 @@ class TestThreeNNOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -303,4 +303,6 @@ def get_cards():
return num
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -293,4 +293,6 @@ def get_cards():
return num
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -338,4 +338,6 @@ def eval():
if __name__ == "__main__":
import paddle
paddle.enable_static()
eval()
......@@ -243,4 +243,6 @@ def train():
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -248,6 +248,8 @@ def get_proposal_func(cfg, mode='TRAIN'):
if __name__ == "__main__":
import paddle
paddle.enable_static()
np.random.seed(3333)
x_np = np.random.random((4, 256, 84)).astype('float32')
......
......@@ -106,4 +106,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -137,4 +137,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -147,4 +147,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -149,4 +149,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -140,4 +140,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -152,4 +152,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -140,4 +140,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -153,4 +153,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -177,6 +177,8 @@ def train(args, config, train_file_list, optimizer_method):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
......
......@@ -270,6 +270,8 @@ def get_cards(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
check_cuda(args.use_gpu)
......
......@@ -324,6 +324,8 @@ def get_shrink(height, width):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
config = reader.Settings(data_dir=args.data_dir)
......
......@@ -196,6 +196,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
train(args)
......@@ -180,6 +180,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
train(args)
......@@ -390,6 +390,8 @@ def infer(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -63,6 +63,8 @@ def train(cfg):
if __name__ == "__main__":
import paddle
paddle.enable_static()
cfg = config.parse_args()
config.print_arguments(cfg)
utility.check_gpu(cfg.use_gpu)
......
......@@ -127,6 +127,8 @@ def test(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_gpu)
test(args)
......@@ -198,6 +198,8 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_gpu)
train(args)
......@@ -224,6 +224,8 @@ def valid(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_gpu)
valid(args)
......@@ -201,4 +201,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -454,4 +454,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -205,4 +205,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -417,4 +417,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -162,4 +162,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
##Training details
#GPU: NVIDIA® Tesla® V100 4cards 120epochs 67h
export CUDA_VISIBLE_DEVICES=0,1,2,3
export CUDA_VISIBLE_DEVICES=0
export FLAGS_fast_eager_deletion_mode=1
export FLAGS_eager_delete_tensor_gb=0.0
export FLAGS_fraction_of_gpu_memory_to_use=0.98
......@@ -8,7 +8,7 @@ export FLAGS_fraction_of_gpu_memory_to_use=0.98
#ResNet50:
python train.py \
--model=ResNet50 \
--batch_size=256 \
--batch_size=64 \
--model_save_dir=output/ \
--lr_strategy=piecewise_decay \
--num_epochs=120 \
......
......@@ -327,4 +327,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -184,6 +184,8 @@ def convert_main(model_name, input_path, output_path, class_num=1000):
if __name__ == "__main__":
import paddle
paddle.enable_static()
assert len(
sys.argv
) == 5, "input format: python weights_aggregator.py $model_name $input_path $output_path $class_num"
......
......@@ -117,4 +117,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -99,4 +99,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -309,4 +309,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -299,4 +299,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -91,4 +91,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -167,4 +167,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -244,4 +244,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -78,6 +78,8 @@ def eval():
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -68,6 +68,8 @@ def infer():
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -179,6 +179,8 @@ def train():
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -56,5 +56,7 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
multiprocessing.set_start_method('spawn', force=True)
main()
......@@ -310,4 +310,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -224,6 +224,8 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
start_time = time.time()
args = parse_args()
print(args)
......
......@@ -150,6 +150,8 @@ def test(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -115,6 +115,8 @@ def save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -191,6 +191,8 @@ def infer(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -247,6 +247,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -257,6 +257,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -521,6 +521,8 @@ def main(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
logger.info("the paddle version is %s" % paddle.__version__)
check_version('1.6.0')
print_arguments(args)
......
......@@ -30,6 +30,10 @@ def check_cuda(use_cuda, err = \
if __name__ == "__main__":
import paddle
paddle.enable_static()
check_cuda(True)
......
......@@ -92,6 +92,8 @@ def do_save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -27,6 +27,8 @@ from inference_model import do_save_inference_model
from ade.utils.configure import PDConfig
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -113,6 +113,8 @@ def do_predict(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -193,6 +193,8 @@ def do_train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -30,6 +30,8 @@ def check_cuda(use_cuda, err = \
if __name__ == "__main__":
import paddle
paddle.enable_static()
check_cuda(True)
......
......@@ -30,6 +30,8 @@ def do_eval(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -118,6 +118,8 @@ def do_save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -26,6 +26,8 @@ from inference_model import do_save_inference_model
from dgu.utils.configure import PDConfig
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -150,6 +150,8 @@ def do_predict(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -263,6 +263,8 @@ def do_train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -104,6 +104,8 @@ def test_inference_model(args, texts):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(json_file="./config.json")
args.build()
args.print_arguments()
......
......@@ -345,6 +345,8 @@ def get_cards():
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig('config.json')
args.build()
args.print_arguments()
......
......@@ -397,6 +397,8 @@ def main(args):
[probs.name], "infer")
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.print_arguments(args)
check_cuda(args.use_cuda)
main(args)
......@@ -513,4 +513,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -121,6 +121,8 @@ def test_process(exe, program, reader, test_ret):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_cuda)
check_version()
......
......@@ -99,6 +99,8 @@ def test_inference_model(model_dir, text_list, dataset):
if __name__ == "__main__":
import paddle
paddle.enable_static()
parser = argparse.ArgumentParser(__doc__)
utils.load_yaml(parser, 'conf/args.yaml')
args = parser.parse_args()
......
......@@ -131,6 +131,8 @@ def infer_process(exe, program, reader, fetch_vars, dataset):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_cuda)
check_version()
......
......@@ -301,6 +301,8 @@ def do_infer(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
parser = argparse.ArgumentParser(__doc__)
utils.load_yaml(parser, './conf/ernie_args.yaml')
args = parser.parse_args()
......
......@@ -186,6 +186,8 @@ def get_cards():
if __name__ == "__main__":
import paddle
paddle.enable_static()
    # Argument handling can use argparse, yaml, or json as needed.
    # For NLP tasks, the configure utilities defined under PALM are recommended; they unify argparse, yaml, and json config files.
......
......@@ -100,6 +100,8 @@ def do_save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./transformer.yaml")
args.build()
args.Print()
......
......@@ -26,6 +26,8 @@ from predict import do_predict
from inference_model import do_save_inference_model
if __name__ == "__main__":
import paddle
paddle.enable_static()
LOG_FORMAT = "[%(asctime)s %(levelname)s %(filename)s:%(lineno)d] %(message)s"
logging.basicConfig(
stream=sys.stdout, level=logging.DEBUG, format=LOG_FORMAT)
......
......@@ -186,6 +186,8 @@ def do_predict(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./transformer.yaml")
args.build()
args.Print()
......
......@@ -249,6 +249,8 @@ def do_train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./transformer.yaml")
args.build()
args.Print()
......
......@@ -153,5 +153,7 @@ def main(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
main(args)
......@@ -445,6 +445,8 @@ def main(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
check_version()
......
......@@ -422,6 +422,8 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
check_version()
......
......@@ -431,6 +431,8 @@ def train(args):
break
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
check_version()
......
......@@ -352,6 +352,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
train(args)
......@@ -593,4 +593,6 @@ def train_loop(args,
if __name__ == '__main__':
import paddle
paddle.enable_static()
train()
......@@ -457,6 +457,8 @@ def main(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
main(args)
......@@ -613,5 +613,7 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
train(args)
......@@ -117,6 +117,8 @@ def test_inference_model(args):
[probs.name], "infer")
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig('senta_config.json')
args.build()
args.print_arguments()
......
......@@ -145,6 +145,8 @@ def test_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig()
args.build()
args.print_arguments()
......
......@@ -316,6 +316,8 @@ def main(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig('senta_config.json')
args.build()
args.print_arguments()
......
......@@ -371,6 +371,8 @@ def main(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig()
args.build()
args.print_arguments()
......
......@@ -181,5 +181,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
infer()
......@@ -280,5 +280,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
main()
......@@ -125,5 +125,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
infer()
......@@ -321,5 +321,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
main()
......@@ -523,6 +523,8 @@ def get_cards():
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = ArgConfig()
args = args.build_conf()
......
This diff is collapsed. (35 additional changed files, diffs not shown.)