#load model
vocab_path="ernie_gen_large_160g/vocab.txt"
config_path="ernie_gen_large_160g/ernie_config.json"
init_model="ernie_gen_large_160g/params"

#input
max_src_len=192
max_tgt_len=64
tokenized_input="true"
continuous_position="true"
batch_size=16
in_tokens="false"
tgt_type_id=3

#decode
do_decode="true"
max_dec_len=32
beam_size=5
length_penalty=0.6
use_multi_gpu_test="true"

#train
epoch=30
weight_decay=0.01
label_smooth=0.1
save_and_valid_by_epoch="true"
hidden_dropout_prob=0.1

#lr
warmup_proportion=0.15
lr_scheduler="linear_warmup_decay"
learning_rate=7.5e-6

#noise
random_noise="true"
noise_prob=0.65

#dataset
data_path="./datasets/gigaword/"
train_set="train.10k.tsv"
dev_set="dev.20k.tsv"
test_set="test.tsv"
do_train="true"
do_val="true"
do_test="true"
do_pred="false"

#evaluate
eval_script="sh ./eval/tasks/gigaword/eval.sh"
eval_mertrics="rouge-1,rouge-2,rouge-l"
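
#usage (a sketch, not part of the task conf): in the ERNIE-GEN fine-tuning
#setup a launch script typically sources this file and forwards each variable
#to the trainer as a command-line flag of the same name; the conf filename,
#launcher invocation, and exact flag mapping below are assumptions, not
#confirmed by this file:
#  source ./task_gigaword_10k.conf
#  python -u ./run_seq2seq.py \
#      --vocab_path "${vocab_path}" \
#      --batch_size ${batch_size} \
#      --learning_rate ${learning_rate} \
#      --beam_size ${beam_size}
#(the remaining keys are forwarded the same way, one flag per variable)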