Unverified commit fa73c7f5, authored by Leo Chen, committed by GitHub

add enable_static() (#4879)

Parent 93c4daa4
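Every changed script follows the same pattern: Paddle 2.0 runs in dynamic (imperative) mode by default, so entry points that still build static-graph (fluid) programs now call paddle.enable_static() before doing any other work. A minimal sketch of that pattern, where train() stands in for each script's actual entry point and is not taken from this diff:

import paddle

def train():
    # legacy static-graph (fluid) code would run here
    pass

if __name__ == "__main__":
    # switch from the default dynamic-graph mode to static-graph mode
    paddle.enable_static()
    train()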
......@@ -87,4 +87,5 @@ def test():
if __name__ == '__main__':
test()
......@@ -145,4 +145,6 @@ def eval():
if __name__ == "__main__":
import paddle
paddle.enable_static()
eval()
......@@ -144,4 +144,6 @@ def eval():
if __name__ == "__main__":
import paddle
paddle.enable_static()
eval()
......@@ -60,4 +60,6 @@ class TestFarthestPointSamplingOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -53,4 +53,6 @@ class TestGatherPointOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -57,4 +57,6 @@ class TestGroupPointsOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -66,4 +66,6 @@ class TestQueryBallOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -63,4 +63,6 @@ class TestThreeInterpOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -76,4 +76,6 @@ class TestThreeNNOp(unittest.TestCase):
if __name__ == "__main__":
import paddle
paddle.enable_static()
unittest.main()
......@@ -303,4 +303,6 @@ def get_cards():
return num
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -293,4 +293,6 @@ def get_cards():
return num
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -338,4 +338,6 @@ def eval():
if __name__ == "__main__":
import paddle
paddle.enable_static()
eval()
......@@ -243,4 +243,6 @@ def train():
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -248,6 +248,8 @@ def get_proposal_func(cfg, mode='TRAIN'):
if __name__ == "__main__":
import paddle
paddle.enable_static()
np.random.seed(3333)
x_np = np.random.random((4, 256, 84)).astype('float32')
......
......@@ -106,4 +106,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -137,4 +137,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -147,4 +147,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -149,4 +149,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -140,4 +140,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -152,4 +152,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -140,4 +140,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -153,4 +153,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -177,6 +177,8 @@ def train(args, config, train_file_list, optimizer_method):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
......
......@@ -270,6 +270,8 @@ def get_cards(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
check_cuda(args.use_gpu)
......
......@@ -324,6 +324,8 @@ def get_shrink(height, width):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
config = reader.Settings(data_dir=args.data_dir)
......
......@@ -196,6 +196,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
train(args)
......@@ -180,6 +180,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
train(args)
......@@ -390,6 +390,8 @@ def infer(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -63,6 +63,8 @@ def train(cfg):
if __name__ == "__main__":
import paddle
paddle.enable_static()
cfg = config.parse_args()
config.print_arguments(cfg)
utility.check_gpu(cfg.use_gpu)
......
......@@ -127,6 +127,8 @@ def test(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_gpu)
test(args)
......@@ -198,6 +198,8 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_gpu)
train(args)
......@@ -224,6 +224,8 @@ def valid(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_gpu)
valid(args)
......@@ -201,4 +201,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -454,4 +454,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -205,4 +205,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -417,4 +417,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -162,4 +162,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
## Training details
# GPU: NVIDIA® Tesla® V100, 4 cards, 120 epochs, 67h
export CUDA_VISIBLE_DEVICES=0,1,2,3
export CUDA_VISIBLE_DEVICES=0
export FLAGS_fast_eager_deletion_mode=1
export FLAGS_eager_delete_tensor_gb=0.0
export FLAGS_fraction_of_gpu_memory_to_use=0.98
......@@ -8,7 +8,7 @@ export FLAGS_fraction_of_gpu_memory_to_use=0.98
#ResNet50:
python train.py \
--model=ResNet50 \
--batch_size=256 \
--batch_size=64 \
--model_save_dir=output/ \
--lr_strategy=piecewise_decay \
--num_epochs=120 \
......
......@@ -327,4 +327,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -184,6 +184,8 @@ def convert_main(model_name, input_path, output_path, class_num=1000):
if __name__ == "__main__":
import paddle
paddle.enable_static()
assert len(
sys.argv
) == 5, "input format: python weights_aggregator.py $model_name $input_path $output_path $class_num"
......
......@@ -117,4 +117,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -99,4 +99,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -309,4 +309,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -299,4 +299,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -91,4 +91,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -167,4 +167,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -244,4 +244,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -78,6 +78,8 @@ def eval():
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -68,6 +68,8 @@ def infer():
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -179,6 +179,8 @@ def train():
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
check_gpu(args.use_gpu)
......
......@@ -56,5 +56,7 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
multiprocessing.set_start_method('spawn', force=True)
main()
......@@ -310,4 +310,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -224,6 +224,8 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
start_time = time.time()
args = parse_args()
print(args)
......
......@@ -150,6 +150,8 @@ def test(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -115,6 +115,8 @@ def save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -191,6 +191,8 @@ def infer(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -247,6 +247,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -257,6 +257,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
# check whether the installed paddle is compiled with GPU
check_cuda(args.use_gpu)
......
......@@ -521,6 +521,8 @@ def main(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
logger.info("the paddle version is %s" % paddle.__version__)
check_version('1.6.0')
print_arguments(args)
......
......@@ -30,6 +30,10 @@ def check_cuda(use_cuda, err = \
if __name__ == "__main__":
import paddle
paddle.enable_static()
check_cuda(True)
......
......@@ -92,6 +92,8 @@ def do_save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -27,6 +27,8 @@ from inference_model import do_save_inference_model
from ade.utils.configure import PDConfig
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -113,6 +113,8 @@ def do_predict(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -193,6 +193,8 @@ def do_train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/ade.yaml")
args.build()
......
......@@ -30,6 +30,8 @@ def check_cuda(use_cuda, err = \
if __name__ == "__main__":
import paddle
paddle.enable_static()
check_cuda(True)
......
......@@ -30,6 +30,8 @@ def do_eval(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -118,6 +118,8 @@ def do_save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -26,6 +26,8 @@ from inference_model import do_save_inference_model
from dgu.utils.configure import PDConfig
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -150,6 +150,8 @@ def do_predict(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -263,6 +263,8 @@ def do_train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./data/config/dgu.yaml")
args.build()
......
......@@ -104,6 +104,8 @@ def test_inference_model(args, texts):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(json_file="./config.json")
args.build()
args.print_arguments()
......
......@@ -345,6 +345,8 @@ def get_cards():
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig('config.json')
args.build()
args.print_arguments()
......
......@@ -397,6 +397,8 @@ def main(args):
[probs.name], "infer")
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.print_arguments(args)
check_cuda(args.use_cuda)
main(args)
......@@ -513,4 +513,6 @@ def main():
if __name__ == '__main__':
import paddle
paddle.enable_static()
main()
......@@ -121,6 +121,8 @@ def test_process(exe, program, reader, test_ret):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_cuda)
check_version()
......
......@@ -99,6 +99,8 @@ def test_inference_model(model_dir, text_list, dataset):
if __name__ == "__main__":
import paddle
paddle.enable_static()
parser = argparse.ArgumentParser(__doc__)
utils.load_yaml(parser, 'conf/args.yaml')
args = parser.parse_args()
......
......@@ -131,6 +131,8 @@ def infer_process(exe, program, reader, fetch_vars, dataset):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parser.parse_args()
check_cuda(args.use_cuda)
check_version()
......
......@@ -301,6 +301,8 @@ def do_infer(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
parser = argparse.ArgumentParser(__doc__)
utils.load_yaml(parser, './conf/ernie_args.yaml')
args = parser.parse_args()
......
......@@ -186,6 +186,8 @@ def get_cards():
if __name__ == "__main__":
import paddle
paddle.enable_static()
# Parameter handling can use argparse, yaml, or json as needed
# For NLP tasks, the configure utilities defined under PALM are recommended, since they unify argparse-, yaml-, and json-format config files.
......
......@@ -100,6 +100,8 @@ def do_save_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./transformer.yaml")
args.build()
args.Print()
......
......@@ -26,6 +26,8 @@ from predict import do_predict
from inference_model import do_save_inference_model
if __name__ == "__main__":
import paddle
paddle.enable_static()
LOG_FORMAT = "[%(asctime)s %(levelname)s %(filename)s:%(lineno)d] %(message)s"
logging.basicConfig(
stream=sys.stdout, level=logging.DEBUG, format=LOG_FORMAT)
......
......@@ -186,6 +186,8 @@ def do_predict(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./transformer.yaml")
args.build()
args.Print()
......
......@@ -249,6 +249,8 @@ def do_train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig(yaml_file="./transformer.yaml")
args.build()
args.Print()
......
......@@ -153,5 +153,7 @@ def main(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
main(args)
......@@ -445,6 +445,8 @@ def main(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
check_version()
......
......@@ -422,6 +422,8 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
check_version()
......
......@@ -431,6 +431,8 @@ def train(args):
break
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
check_version()
......
......@@ -352,6 +352,8 @@ def train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
train(args)
......@@ -593,4 +593,6 @@ def train_loop(args,
if __name__ == '__main__':
import paddle
paddle.enable_static()
train()
......@@ -457,6 +457,8 @@ def main(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
check_cuda(args.use_cuda)
main(args)
......@@ -613,5 +613,7 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
print_arguments(args)
train(args)
......@@ -117,6 +117,8 @@ def test_inference_model(args):
[probs.name], "infer")
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig('senta_config.json')
args.build()
args.print_arguments()
......
......@@ -145,6 +145,8 @@ def test_inference_model(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig()
args.build()
args.print_arguments()
......
......@@ -316,6 +316,8 @@ def main(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig('senta_config.json')
args.build()
args.print_arguments()
......
......@@ -371,6 +371,8 @@ def main(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = PDConfig()
args.build()
args.print_arguments()
......
......@@ -181,5 +181,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
infer()
......@@ -280,5 +280,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
main()
......@@ -125,5 +125,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
infer()
......@@ -321,5 +321,7 @@ def check_version():
if __name__ == '__main__':
import paddle
paddle.enable_static()
check_version()
main()
......@@ -523,6 +523,8 @@ def get_cards():
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = ArgConfig()
args = args.build_conf()
......
......@@ -107,5 +107,7 @@ def set_zero(var_name,
if __name__ == '__main__':
import paddle
paddle.enable_static()
utils.check_version()
infer()
......@@ -87,6 +87,8 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print(args)
utils.check_version()
......
......@@ -66,6 +66,8 @@ class CriteoDataset(dg.MultiSlotDataGenerator):
if __name__ == '__main__':
import paddle
paddle.enable_static()
criteo_dataset = CriteoDataset()
if len(sys.argv) <= 1:
sys.stderr.write("feat_dict needed for criteo reader.")
......
......@@ -85,5 +85,7 @@ def set_zero(var_name,
if __name__ == '__main__':
import paddle
paddle.enable_static()
utils.check_version()
infer()
......@@ -66,5 +66,7 @@ def train():
if __name__ == '__main__':
import paddle
paddle.enable_static()
utils.check_version()
train()
......@@ -84,6 +84,7 @@ def infer(args):
if __name__ == '__main__':
args = utils.parse_args()
utils.print_arguments(args)
......
......@@ -104,4 +104,6 @@ def infer():
if __name__ == '__main__':
import paddle
paddle.enable_static()
infer()
......@@ -184,4 +184,6 @@ def get_cards(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -162,6 +162,8 @@ def run_infer(args, model_path):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
model_list = []
......
......@@ -273,5 +273,7 @@ def train():
if __name__ == '__main__':
import paddle
paddle.enable_static()
utils.check_version()
train()
......@@ -67,6 +67,8 @@ def run_infer(args,test_data_path):
logger.info("mean_acc:{:.5f}, mean_auc:{:.5f}".format(np.mean(mean_acc), np.mean(mean_auc)))
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
run_infer(args, args.test_data_path)
......@@ -46,5 +46,7 @@ def train(args, train_data_path):
fluid.io.save(main_program,model_dir)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
train(args, args.train_data_path)
......@@ -37,5 +37,7 @@ class CriteoDataset(dg.MultiSlotDataGenerator):
if __name__ == '__main__':
import paddle
paddle.enable_static()
criteo_dataset = CriteoDataset()
criteo_dataset.run_from_stdin()
......@@ -90,5 +90,7 @@ def set_zero(var_name,
if __name__ == '__main__':
import paddle
paddle.enable_static()
utils.check_version()
infer()
......@@ -62,5 +62,7 @@ def train():
if __name__ == '__main__':
import paddle
paddle.enable_static()
utils.check_version()
train()
......@@ -37,5 +37,7 @@ def infer(args):
logger.info("query_doc_sim: {:.5f}".format(np.array(con_sim).reshape(-1,1)[0][0]))
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
infer(args)
\ No newline at end of file
......@@ -87,5 +87,7 @@ def infer(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
infer(args)
......@@ -171,4 +171,6 @@ def get_cards(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
train()
......@@ -71,6 +71,8 @@ def infer(test_reader, use_cuda, model_path):
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
args = parse_args()
start_index = args.start_index
......
......@@ -83,6 +83,8 @@ def infer(args, vocab_size, test_reader, use_cuda):
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
args = parse_args()
start_index = args.start_index
......
......@@ -168,5 +168,7 @@ def get_device(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
train()
......@@ -128,5 +128,7 @@ def train():
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
train()
......@@ -49,6 +49,8 @@ def run_infer(args,model_path,test_data_path,vocab_size):
debug=False)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
model_list = []
......
......@@ -38,6 +38,8 @@ def train(args, vocab_size, train_data_path):
fluid.io.save(main_program,model_dir)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
vocab_size =utils.get_vocab_size(args.vocab_path)
train(args, vocab_size, args.train_data_path)
......@@ -132,5 +132,7 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
check_version()
main()
......@@ -188,5 +188,7 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
check_version()
main()
......@@ -16,6 +16,8 @@ logger = logging.getLogger("fluid")
logger.setLevel(logging.INFO)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
dataset = Dataset(args.path + args.dataset)
......
......@@ -64,5 +64,7 @@ def train(args, train_data_path):
fluid.io.save_inference_model(save_dir, feed_var_names, fetch_vars, exe)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
train(args, args.train_data_path)
......@@ -77,6 +77,8 @@ def run_infer(args):
end-begin, float(np.array(loss_val)), float(np.array(auc))))
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
run_infer(args)
\ No newline at end of file
......@@ -66,5 +66,7 @@ def train(args):
fluid.save(main_program, model_dir)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
train(args)
\ No newline at end of file
......@@ -119,6 +119,8 @@ def infer(args, vocab_size, test_reader):
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
args = parse_args()
start_index = args.start_index
......
......@@ -168,5 +168,7 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
main()
......@@ -71,6 +71,8 @@ def infer(test_reader, vocab_tag, use_cuda, model_path, epoch):
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
args = parse_args()
start_index = args.start_index
......
......@@ -167,5 +167,7 @@ def get_device(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
train()
......@@ -70,5 +70,7 @@ class TDMDataset(dg.MultiSlotStringDataGenerator):
if __name__ == "__main__":
import paddle
paddle.enable_static()
d = TDMDataset()
d.run_from_stdin()
......@@ -242,6 +242,8 @@ def get_example_num(file_list):
if __name__ == "__main__":
import paddle
paddle.enable_static()
print(os.getcwd())
args = parse_args()
print_arguments(args)
......
......@@ -110,6 +110,8 @@ def run_infer(args, model_path):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
# Specify the directory containing the inference model here
......
......@@ -128,6 +128,8 @@ def run_train(args):
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
run_train(args)
......@@ -100,4 +100,6 @@ def main():
if __name__ == "__main__":
import paddle
paddle.enable_static()
main()
......@@ -177,6 +177,8 @@ def infer_step(args, vocab_size, test_reader, use_cuda, i2w):
if __name__ == "__main__":
import paddle
paddle.enable_static()
utils.check_version()
args = parse_args()
start_index = args.start_index
......
......@@ -251,6 +251,8 @@ def train(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
utils.check_version(args.with_shuffle_batch)
train(args)
......@@ -47,6 +47,8 @@ def infer(args):
user_vec.to_csv(args.user_vec_path, mode="a", index=False, header=0)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
if(os.path.exists(args.user_vec_path)):
os.system("rm " + args.user_vec_path)
......
......@@ -69,6 +69,8 @@ def train(args):
video_vec.to_csv(args.video_vec_path, mode="a", index=False, header=0)
if __name__ == "__main__":
import paddle
paddle.enable_static()
args = args.parse_args()
if(os.path.exists(args.video_vec_path)):
os.system("rm " + args.video_vec_path)
......
......@@ -103,6 +103,8 @@ def infer(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
infer(args)
......@@ -276,6 +276,8 @@ def infer_from_ckpt(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
......
......@@ -205,6 +205,8 @@ def profile(args):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
profile(args)
......@@ -392,6 +392,8 @@ def batch_data_to_lod_tensors(args, batch_data, place):
if __name__ == '__main__':
import paddle
paddle.enable_static()
args = parse_args()
print_arguments(args)
......